I am trying to run TensorFlow in my Jupyter notebook on Windows and am having trouble getting it to work. When I try to run `import tensorflow as tf`, I get the following output:
TypeError Traceback (most recent call last)
Cell In[2], line 1
----> 1 import tensorflow as tf
2 print("TensorFlow version:", tf.__version__)
File ~\anaconda3\lib\site-packages\tensorflow\__init__.py:37
34 import sys as _sys
35 import typing as _typing
---> 37 from tensorflow.python.tools import module_util as _module_util
38 from tensorflow.python.util.lazy_loader import LazyLoader as _LazyLoader
40 # Make sure code inside the TensorFlow codebase can use tf2.enabled() at import.
File ~\anaconda3\lib\site-packages\tensorflow\python\__init__.py:42
37 from tensorflow.python.eager import context
39 # pylint: enable=wildcard-import
40
41 # Bring in subpackages.
---> 42 from tensorflow.python import data
43 from tensorflow.python import distribute
44 # from tensorflow.python import keras
File ~\anaconda3\lib\site-packages\tensorflow\python\data\__init__.py:21
15 """`tf.data.Dataset` API for input pipelines.
16
17 See [Importing Data](https://tensorflow.org/guide/data) for an overview.
18 """
20 # pylint: disable=unused-import
---> 21 from tensorflow.python.data import experimental
22 from tensorflow.python.data.ops.dataset_ops import AUTOTUNE
23 from tensorflow.python.data.ops.dataset_ops import Dataset
File ~\anaconda3\lib\site-packages\tensorflow\python\data\experimental\__init__.py:96
15 """Experimental API for building input pipelines.
16
17 This module contains experimental `Dataset` sources and transformations that can
(...)
92 @@UNKNOWN_CARDINALITY
93 """
95 # pylint: disable=unused-import
---> 96 from tensorflow.python.data.experimental import service
97 from tensorflow.python.data.experimental.ops.batching import dense_to_ragged_batch
98 from tensorflow.python.data.experimental.ops.batching import dense_to_sparse_batch
File ~\anaconda3\lib\site-packages\tensorflow\python\data\experimental\service\__init__.py:419
1 # Copyright 2020 The TensorFlow Authors. All Rights Reserved.
2 #
3 # Licensed under the Apache License, Version 2.0 (the "License");
(...)
13 # limitations under the License.
14 # ==============================================================================
15 """API for using the tf.data service.
16
17 This module contains:
(...)
416 job of ParameterServerStrategy).
417 """
--> 419 from tensorflow.python.data.experimental.ops.data_service_ops import distribute
420 from tensorflow.python.data.experimental.ops.data_service_ops import from_dataset_id
421 from tensorflow.python.data.experimental.ops.data_service_ops import register_dataset
File ~\anaconda3\lib\site-packages\tensorflow\python\data\experimental\ops\data_service_ops.py:22
20 from tensorflow.core.protobuf import data_service_pb2
21 from tensorflow.python import tf2
---> 22 from tensorflow.python.data.experimental.ops import compression_ops
23 from tensorflow.python.data.experimental.service import _pywrap_server_lib
24 from tensorflow.python.data.experimental.service import _pywrap_utils
File ~\anaconda3\lib\site-packages\tensorflow\python\data\experimental\ops\compression_ops.py:16
1 # Copyright 2020 The TensorFlow Authors. All Rights Reserved.
2 #
3 # Licensed under the Apache License, Version 2.0 (the "License");
(...)
13 # limitations under the License.
14 # ==============================================================================
15 """Ops for compressing and uncompressing dataset elements."""
---> 16 from tensorflow.python.data.util import structure
17 from tensorflow.python.ops import gen_experimental_dataset_ops as ged_ops
20 def compress(element):
File ~\anaconda3\lib\site-packages\tensorflow\python\data\util\structure.py:22
18 import itertools
20 import wrapt
---> 22 from tensorflow.python.data.util import nest
23 from tensorflow.python.framework import composite_tensor
24 from tensorflow.python.framework import ops
File ~\anaconda3\lib\site-packages\tensorflow\python\data\util\nest.py:34
1 # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
2 #
3 # Licensed under the Apache License, Version 2.0 (the "License");
(...)
13 # limitations under the License.
14 # ==============================================================================
16 """## Functions for working with arbitrarily nested sequences of elements.
17
18 NOTE(mrry): This fork of the `tensorflow.python.util.nest` module
(...)
31 arrays.
32 """
---> 34 from tensorflow.python.framework import sparse_tensor as _sparse_tensor
35 from tensorflow.python.util import _pywrap_utils
36 from tensorflow.python.util import nest
File ~\anaconda3\lib\site-packages\tensorflow\python\framework\sparse_tensor.py:23
21 from tensorflow.python import pywrap_tensorflow # pylint: disable=unused-import
22 from tensorflow.python import tf2
---> 23 from tensorflow.python.framework import composite_tensor
24 from tensorflow.python.framework import constant_op
25 from tensorflow.python.framework import dtypes
File ~\anaconda3\lib\site-packages\tensorflow\python\framework\composite_tensor.py:21
19 from tensorflow.python import pywrap_tensorflow # pylint: disable=unused-import
20 from tensorflow.python.util import _pywrap_utils
---> 21 from tensorflow.python.util import nest
22 from tensorflow.python.util.tf_export import tf_export
25 @tf_export("__internal__.CompositeTensor", v1=[])
26 class CompositeTensor(metaclass=abc.ABCMeta):
File ~\anaconda3\lib\site-packages\tensorflow\python\util\nest.py:1735
1729 return _sequence_like(instance, args)
1731 return _pack_sequence_as(structure, flatten(structure), False,
1732 sequence_fn=sequence_fn)
-> 1735 _pywrap_utils.RegisterType("Mapping", _collections_abc.Mapping)
1736 _pywrap_utils.RegisterType("MutableMapping", _collections_abc.MutableMapping)
1737 _pywrap_utils.RegisterType("Sequence", _collections_abc.Sequence)
TypeError: Value already registered for Mapping
I haven't found any similar issues on Google, and I can't find any earlier code in nest.py that would have already registered a value for "Mapping".
For reference, my TensorFlow version is 2.11.
I would appreciate any help with this issue. Thanks in advance!