JetPack 4.6.1 LSTM AttributeError 'strategy_supports_no_merge_call'

I have used a similar LSTM model on previous JetPack versions, but for some reason I can't run model.fit() as I get the following error message.

I am very new to data science and I don't understand what this error message means. I would really appreciate it if anyone can help. Thanks in advance!

AttributeError: module 'tensorflow.compat.v2.__internal__.distribute' has no attribute 'strategy_supports_no_merge_call'

Full Log:

Epoch 1/100
---------------------------------------------------------------------------
AttributeError                            Traceback (most recent call last)
<ipython-input-21-18fefb168cfe> in <module>
----> 1 regressor.fit(X_train, y_train, epochs = 100, batch_size = 32)

/usr/local/lib/python3.6/dist-packages/keras/engine/training.py in fit(self, x, y, batch_size, epochs, verbose, callbacks, validation_split, validation_data, shuffle, class_weight, sample_weight, initial_epoch, steps_per_epoch, validation_steps, validation_batch_size, validation_freq, max_queue_size, workers, use_multiprocessing)
   1382                 _r=1):
   1383               callbacks.on_train_batch_begin(step)
-> 1384               tmp_logs = self.train_function(iterator)
   1385               if data_handler.should_sync:
   1386                 context.async_wait()

/usr/local/lib/python3.6/dist-packages/tensorflow/python/eager/def_function.py in __call__(self, *args, **kwds)
    908 
    909       with OptionalXlaContext(self._jit_compile):
--> 910         result = self._call(*args, **kwds)
    911 
    912       new_tracing_count = self.experimental_get_tracing_count()

/usr/local/lib/python3.6/dist-packages/tensorflow/python/eager/def_function.py in _call(self, *args, **kwds)
    956       # This is the first call of __call__, so we have to initialize.
    957       initializers = []
--> 958       self._initialize(args, kwds, add_initializers_to=initializers)
    959     finally:
    960       # At this point we know that the initialization is complete (or less

/usr/local/lib/python3.6/dist-packages/tensorflow/python/eager/def_function.py in _initialize(self, args, kwds, add_initializers_to)
    779     self._concrete_stateful_fn = (
    780         self._stateful_fn._get_concrete_function_internal_garbage_collected(  # pylint: disable=protected-access
--> 781             *args, **kwds))
    782 
    783     def invalid_creator_scope(*unused_args, **unused_kwds):

/usr/local/lib/python3.6/dist-packages/tensorflow/python/eager/function.py in _get_concrete_function_internal_garbage_collected(self, *args, **kwargs)
   3155       args, kwargs = None, None
   3156     with self._lock:
-> 3157       graph_function, _ = self._maybe_define_function(args, kwargs)
   3158     return graph_function
   3159 

/usr/local/lib/python3.6/dist-packages/tensorflow/python/eager/function.py in _maybe_define_function(self, args, kwargs)
   3555 
   3556           self._function_cache.missed.add(call_context_key)
-> 3557           graph_function = self._create_graph_function(args, kwargs)
   3558           self._function_cache.primary[cache_key] = graph_function
   3559 

/usr/local/lib/python3.6/dist-packages/tensorflow/python/eager/function.py in _create_graph_function(self, args, kwargs, override_flat_arg_shapes)
   3400             arg_names=arg_names,
   3401             override_flat_arg_shapes=override_flat_arg_shapes,
-> 3402             capture_by_value=self._capture_by_value),
   3403         self._function_attributes,
   3404         function_spec=self.function_spec,

/usr/local/lib/python3.6/dist-packages/tensorflow/python/framework/func_graph.py in func_graph_from_py_func(name, python_func, args, kwargs, signature, func_graph, autograph, autograph_options, add_control_dependencies, arg_names, op_return_value, collections, capture_by_value, override_flat_arg_shapes, acd_record_initial_resource_uses)
   1141         _, original_func = tf_decorator.unwrap(python_func)
   1142 
-> 1143       func_outputs = python_func(*func_args, **func_kwargs)
   1144 
   1145       # invariant: `func_outputs` contains only Tensors, CompositeTensors,

/usr/local/lib/python3.6/dist-packages/tensorflow/python/eager/def_function.py in wrapped_fn(*args, **kwds)
    670         # the function a weak reference to itself to avoid a reference cycle.
    671         with OptionalXlaContext(compile_with_xla):
--> 672           out = weak_wrapped_fn().__wrapped__(*args, **kwds)
    673         return out
    674 

/usr/local/lib/python3.6/dist-packages/tensorflow/python/framework/func_graph.py in autograph_handler(*args, **kwargs)
   1127           except Exception as e:  # pylint:disable=broad-except
   1128             if hasattr(e, "ag_error_metadata"):
-> 1129               raise e.ag_error_metadata.to_exception(e)
   1130             else:
   1131               raise

AttributeError: in user code:

    File "/usr/local/lib/python3.6/dist-packages/keras/engine/training.py", line 1021, in train_function  *
        return step_function(self, iterator)
    File "/usr/local/lib/python3.6/dist-packages/keras/engine/training.py", line 1010, in step_function  **
        outputs = model.distribute_strategy.run(run_step, args=(data,))
    File "/usr/local/lib/python3.6/dist-packages/keras/engine/training.py", line 1000, in run_step  **
        outputs = model.train_step(data)
    File "/usr/local/lib/python3.6/dist-packages/keras/engine/training.py", line 863, in train_step
        self.optimizer.minimize(loss, self.trainable_variables, tape=tape)
    File "/usr/local/lib/python3.6/dist-packages/keras/optimizer_v2/optimizer_v2.py", line 532, in minimize
        return self.apply_gradients(grads_and_vars, name=name)
    File "/usr/local/lib/python3.6/dist-packages/keras/optimizer_v2/optimizer_v2.py", line 668, in apply_gradients
        grads_and_vars = self._aggregate_gradients(grads_and_vars)
    File "/usr/local/lib/python3.6/dist-packages/keras/optimizer_v2/optimizer_v2.py", line 484, in _aggregate_gradients
        return self.gradient_aggregator(grads_and_vars)
    File "/usr/local/lib/python3.6/dist-packages/keras/optimizer_v2/utils.py", line 33, in all_reduce_sum_gradients
        if tf.__internal__.distribute.strategy_supports_no_merge_call():

    AttributeError: module 'tensorflow.compat.v2.__internal__.distribute' has no attribute 'strategy_supports_no_merge_call'

Hi,

Based on the link below, it looks like a compatibility issue between the installed TensorFlow and Keras versions.

Which TensorFlow and Keras version do you use?
Could you try the version that is recommended in the solution?

Thanks.

This topic was automatically closed 14 days after the last reply. New replies are no longer allowed.