Dear everyone:
I’m new to tensorflow. The coding as follows:
def train_model(self):
checkpoint = ModelCheckpoint(self.PATH, monitor=‘val_loss’, verbose=1, save_best_only=True, mode=‘auto’)
if self.modality == “audio”:
model = self.get_audio_model()
model.compile(optimizer=‘adadelta’, loss=‘categorical_crossentropy’, sample_weight_mode=‘temporal’)
elif self.modality == “text”:
model = self.get_text_model()
model.compile(optimizer=‘adadelta’, loss=‘categorical_crossentropy’, sample_weight_mode=‘temporal’)
elif self.modality == “bimodal”:
model = self.get_bimodal_model()
model.compile(optimizer=‘adam’, loss=‘categorical_crossentropy’, sample_weight_mode=‘temporal’)
early_stopping = EarlyStopping(monitor=‘val_loss’, patience=10)
model.fit(self.train_x, self.train_y,
epochs=self.epochs,
batch_size=self.batch_size,
sample_weight=self.train_mask,
class_weight = {0:4.0, 1:15.0, 2:15.0, 3:3.0, 4:1.0, 5:6.0, 6:3.0},
shuffle=True,
callbacks=[early_stopping, checkpoint],
validation_data=(self.val_x, self.val_y, self.val_mask))
self.test_model()
To be honest, the class_weight = {0:4.0, 1:15.0, 2:15.0, 3:3.0, 4:1.0, 5:6.0, 6:3.0} was added by myself to adjust the class weight. However, it reported the error : ValueError: class_weight
not supported for 3+ dimensional targets.
The full of error as follows:
ValueError Traceback (most recent call last)
~\baseline.py in
288 model.test_model()
289 else:
→ 290 model.train_model()
~\baseline.py in train_model(self)
219
220 early_stopping = EarlyStopping(monitor=‘val_loss’, patience=10)
→ 221 model.fit(self.train_x, self.train_y,
222 epochs=self.epochs,
223 batch_size=self.batch_size,
F:\Anaconda\lib\site-packages\keras\engine\training.py in fit(self, x, y, batch_size, epochs, verbose, callbacks, validation_split, validation_data, shuffle, class_weight, sample_weight, initial_epoch, steps_per_epoch, validation_steps, validation_batch_size, validation_freq, max_queue_size, workers, use_multiprocessing)
1106 training_utils.RespectCompiledTrainableState(self):
1107 # Creates a tf.data.Dataset
and handles batch and epoch iteration.
→ 1108 data_handler = data_adapter.get_data_handler(
1109 x=x,
1110 y=y,
F:\Anaconda\lib\site-packages\keras\engine\data_adapter.py in get_data_handler(*args, **kwargs)
1346 if getattr(kwargs[“model”], “_cluster_coordinator”, None):
1347 return _ClusterCoordinatorDataHandler(*args, **kwargs)
→ 1348 return DataHandler(*args, **kwargs)
1349
1350
F:\Anaconda\lib\site-packages\keras\engine\data_adapter.py in init(self, x, y, sample_weight, batch_size, steps_per_epoch, initial_epoch, epochs, shuffle, class_weight, max_queue_size, workers, use_multiprocessing, model, steps_per_execution, distribute)
1156 self._insufficient_data = False
1157
→ 1158 self._configure_dataset_and_inferred_steps(strategy, x, steps_per_epoch,
1159 class_weight, distribute)
1160
F:\Anaconda\lib\site-packages\keras\engine\data_adapter.py in _configure_dataset_and_inferred_steps(failed resolving arguments)
1168 dataset = self._adapter.get_dataset()
1169 if class_weight:
→ 1170 dataset = dataset.map(_make_class_weight_map_fn(class_weight))
1171 self._inferred_steps = self._infer_steps(steps_per_epoch, dataset)
1172
F:\Anaconda\lib\site-packages\tensorflow\python\data\ops\dataset_ops.py in map(self, map_func, num_parallel_calls, deterministic)
1923 warnings.warn("The deterministic
argument has no effect unless the "
1924 “num_parallel_calls
argument is specified.”)
→ 1925 return MapDataset(self, map_func, preserve_cardinality=True)
1926 else:
1927 return ParallelMapDataset(
F:\Anaconda\lib\site-packages\tensorflow\python\data\ops\dataset_ops.py in init(self, input_dataset, map_func, use_inter_op_parallelism, preserve_cardinality, use_legacy_function)
4481 self._use_inter_op_parallelism = use_inter_op_parallelism
4482 self._preserve_cardinality = preserve_cardinality
→ 4483 self._map_func = StructuredFunctionWrapper(
4484 map_func,
4485 self._transformation_name(),
F:\Anaconda\lib\site-packages\tensorflow\python\data\ops\dataset_ops.py in init(self, func, transformation_name, dataset, input_classes, input_shapes, input_types, input_structure, add_to_graph, use_legacy_function, defun_kwargs)
3710 resource_tracker = tracking.ResourceTracker()
3711 with tracking.resource_tracker_scope(resource_tracker):
→ 3712 self._function = fn_factory()
3713 # There is no graph to add in eager mode.
3714 add_to_graph &= not context.executing_eagerly()
F:\Anaconda\lib\site-packages\tensorflow\python\eager\function.py in get_concrete_function(self, *args, **kwargs)
3132 or tf.Tensor
or tf.TensorSpec
.
3133 “”"
→ 3134 graph_function = self._get_concrete_function_garbage_collected(
3135 *args, **kwargs)
3136 graph_function._garbage_collector.release() # pylint: disable=protected-access
F:\Anaconda\lib\site-packages\tensorflow\python\eager\function.py in _get_concrete_function_garbage_collected(self, *args, **kwargs)
3098 args, kwargs = None, None
3099 with self._lock:
→ 3100 graph_function, _ = self._maybe_define_function(args, kwargs)
3101 seen_names = set()
3102 captured = object_identity.ObjectIdentitySet(
F:\Anaconda\lib\site-packages\tensorflow\python\eager\function.py in _maybe_define_function(self, args, kwargs)
3442
3443 self._function_cache.missed.add(call_context_key)
→ 3444 graph_function = self._create_graph_function(args, kwargs)
3445 self._function_cache.primary[cache_key] = graph_function
3446
F:\Anaconda\lib\site-packages\tensorflow\python\eager\function.py in _create_graph_function(self, args, kwargs, override_flat_arg_shapes)
3277 arg_names = base_arg_names + missing_arg_names
3278 graph_function = ConcreteFunction(
→ 3279 func_graph_module.func_graph_from_py_func(
3280 self._name,
3281 self._python_function,
F:\Anaconda\lib\site-packages\tensorflow\python\framework\func_graph.py in func_graph_from_py_func(name, python_func, args, kwargs, signature, func_graph, autograph, autograph_options, add_control_dependencies, arg_names, op_return_value, collections, capture_by_value, override_flat_arg_shapes)
997 _, original_func = tf_decorator.unwrap(python_func)
998
→ 999 func_outputs = python_func(*func_args, **func_kwargs)
1000
1001 # invariant: func_outputs
contains only Tensors, CompositeTensors,
F:\Anaconda\lib\site-packages\tensorflow\python\data\ops\dataset_ops.py in wrapped_fn(*args)
3685 attributes=defun_kwargs)
3686 def wrapped_fn(*args): # pylint: disable=missing-docstring
→ 3687 ret = wrapper_helper(*args)
3688 ret = structure.to_tensor_list(self._output_structure, ret)
3689 return [ops.convert_to_tensor(t) for t in ret]
F:\Anaconda\lib\site-packages\tensorflow\python\data\ops\dataset_ops.py in wrapper_helper(*args)
3615 if not _should_unpack(nested_args):
3616 nested_args = (nested_args,)
→ 3617 ret = autograph.tf_convert(self._func, ag_ctx)(*nested_args)
3618 if _should_pack(ret):
3619 ret = tuple(ret)
F:\Anaconda\lib\site-packages\tensorflow\python\autograph\impl\api.py in wrapper(*args, **kwargs)
693 except Exception as e: # pylint:disable=broad-except
694 if hasattr(e, ‘ag_error_metadata’):
→ 695 raise e.ag_error_metadata.to_exception(e)
696 else:
697 raise
ValueError: in user code:
F:\Anaconda\lib\site-packages\keras\engine\data_adapter.py:1385 _class_weights_map_fn *
raise ValueError("`class_weight` not supported for "
ValueError: `class_weight` not supported for 3+ dimensional targets.
How to fix it? Thanks
Best wishes.
jiachen