I am in the demo notebook of the Mask R-CNN repo from Matterport, trying to run the first cell, but I am encountering the following error. I have Keras 2.3.0 installed and am running Python 3.8. Below is a trace of the error from the model.py file inside the Mask R-CNN repo, which came with the clone. Thanks!
The repo referred to is here: https://github.com/matterport/Mask_RCNN.git
Thank you all for your kind support.
ModuleNotFoundError Traceback (most recent call last)
~\AppData\Local\Temp/ipykernel_3220/983756133.py in <module>
7 import matplotlib
8 import matplotlib.pyplot as plt
----> 9 import keras
10
11 # Root directory of the project
~\OneDrive\New Project\myenv1\lib\site-packages\keras\__init__.py in <module>
1 from __future__ import absolute_import
2
----> 3 from . import utils
4 from . import activations
5 from . import applications
~\OneDrive\New Project\myenv1\lib\site-packages\keras\utils\__init__.py in <module>
24 from .layer_utils import get_source_inputs
25 from .layer_utils import print_summary
---> 26 from .vis_utils import model_to_dot
27 from .vis_utils import plot_model
28 from .np_utils import to_categorical
~\OneDrive\New Project\myenv1\lib\site-packages\keras\utils\vis_utils.py in <module>
5
6 import os
----> 7 from ..models import Model
8 from ..layers.wrappers import Wrapper
9
~\OneDrive\New Project\myenv1\lib\site-packages\keras\models.py in <module>
10 from .engine.input_layer import Input
11 from .engine.input_layer import InputLayer
---> 12 from .engine.training import Model
13 from .engine.sequential import Sequential
14 from .engine.saving import save_model
~\OneDrive\New Project\myenv1\lib\site-packages\keras\engine\__init__.py in <module>
6 from .base_layer import Layer
7 from .network import get_source_inputs
----> 8 from .training import Model
~\OneDrive\New Project\myenv1\lib\site-packages\keras\engine\training.py in <module>
12 from .network import Network
13 from .base_layer import Layer
---> 14 from . import training_utils
15 from . import training_arrays
16 from . import training_generator
~\OneDrive\New Project\myenv1\lib\site-packages\keras\engine\training_utils.py in <module>
15 from .. import backend as K
16 from .. import losses
---> 17 from .. import metrics as metrics_module
18 from ..utils import Sequence
19 from ..utils import generic_utils
~\OneDrive\New Project\myenv1\lib\site-packages\keras\metrics.py in <module>
1848 import tensorflow as tf
1849 if tf.__version__ >= '2.0.0':
-> 1850 BaseMeanIoU = tf.keras.metrics.MeanIoU
1851
1852
~\OneDrive\New Project\myenv1\lib\site-packages\tensorflow\python\util\lazy_loader.py in __getattr__(self, item)
60
61 def __getattr__(self, item):
---> 62 module = self._load()
63 return getattr(module, item)
64
~\OneDrive\New Project\myenv1\lib\site-packages\tensorflow\python\util\lazy_loader.py in _load(self)
43 """Load the module and insert it into the parent's globals."""
44 # Import the target module and insert it into the parent's namespace
---> 45 module = importlib.import_module(self.__name__)
46 self._parent_module_globals[self._local_name] = module
47
~\AppData\Local\Programs\Python\Python38\lib\importlib\__init__.py in import_module(name, package)
125 break
126 level += 1
--> 127 return _bootstrap._gcd_import(name[level:], package, level)
128
129
ModuleNotFoundError: No module named 'keras.api'
I read the repo you referred to. You could try running your code with the environment below:
Python 3.4, TensorFlow 1.3, Keras 2.0.8
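If you go that route, one way to set it up is a separate environment with those pinned versions. This is only a rough sketch, assuming you manage environments with conda; the environment name maskrcnn is just an example, and wheel availability for your OS and Python version may differ:

# create an isolated environment with the Python version suggested above
conda create -n maskrcnn python=3.4
conda activate maskrcnn
# pin the TensorFlow/Keras versions the repo was written against,
# plus jupyter so the demo notebook can be run from inside this env
pip install tensorflow==1.3.0 keras==2.0.8 jupyter

Then launch the demo notebook from inside that environment rather than your Python 3.8 install.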
I installed pytorch-lightning using pip, and I'm running on Mac.
I tried:
! pip install pytorch-lightning --upgrade
! pip install pytorch-lightning-bolts
(finished successfully)
and then:
import pytorch_lightning as pl
and what I get is:
---------------------------------------------------------------------------
ImportError Traceback (most recent call last)
<ipython-input-3-f3b4217dcea1> in <module>
7 from torchvision.datasets import MNIST
8 from torchvision import transforms
----> 9 import pytorch_lightning as pl
10 from pytorch_lightning.metrics.functional import accuracy
11 tmpdir = os.getcwd()
/Library/Frameworks/Python.framework/Versions/3.8/lib/python3.8/site-packages/pytorch_lightning/__init__.py in <module>
60 # We are not importing the rest of the lightning during the build process, as it may not be compiled yet
61 else:
---> 62 from pytorch_lightning import metrics
63 from pytorch_lightning.callbacks import Callback
64 from pytorch_lightning.core import LightningDataModule, LightningModule
/Library/Frameworks/Python.framework/Versions/3.8/lib/python3.8/site-packages/pytorch_lightning/metrics/__init__.py in <module>
12 # See the License for the specific language governing permissions and
13 # limitations under the License.
---> 14 from pytorch_lightning.metrics.classification import ( # noqa: F401
15 Accuracy,
16 AUC,
/Library/Frameworks/Python.framework/Versions/3.8/lib/python3.8/site-packages/pytorch_lightning/metrics/classification/__init__.py in <module>
12 # See the License for the specific language governing permissions and
13 # limitations under the License.
---> 14 from pytorch_lightning.metrics.classification.accuracy import Accuracy # noqa: F401
15 from pytorch_lightning.metrics.classification.auc import AUC # noqa: F401
16 from pytorch_lightning.metrics.classification.auroc import AUROC # noqa: F401
/Library/Frameworks/Python.framework/Versions/3.8/lib/python3.8/site-packages/pytorch_lightning/metrics/classification/accuracy.py in <module>
16 import torch
17
---> 18 from pytorch_lightning.metrics.functional.accuracy import _accuracy_compute, _accuracy_update
19 from pytorch_lightning.metrics.metric import Metric
20
/Library/Frameworks/Python.framework/Versions/3.8/lib/python3.8/site-packages/pytorch_lightning/metrics/functional/__init__.py in <module>
12 # See the License for the specific language governing permissions and
13 # limitations under the License.
---> 14 from pytorch_lightning.metrics.functional.accuracy import accuracy # noqa: F401
15 from pytorch_lightning.metrics.functional.auc import auc # noqa: F401
16 from pytorch_lightning.metrics.functional.auroc import auroc # noqa: F401
/Library/Frameworks/Python.framework/Versions/3.8/lib/python3.8/site-packages/pytorch_lightning/metrics/functional/accuracy.py in <module>
16 import torch
17
---> 18 from pytorch_lightning.metrics.classification.helpers import _input_format_classification, DataType
19
20
/Library/Frameworks/Python.framework/Versions/3.8/lib/python3.8/site-packages/pytorch_lightning/metrics/classification/helpers.py in <module>
17 import torch
18
---> 19 from pytorch_lightning.metrics.utils import select_topk, to_onehot
20 from pytorch_lightning.utilities import LightningEnum
21
/Library/Frameworks/Python.framework/Versions/3.8/lib/python3.8/site-packages/pytorch_lightning/metrics/utils.py in <module>
16 import torch
17
---> 18 from pytorch_lightning.utilities import rank_zero_warn
19
20 METRIC_EPS = 1e-6
/Library/Frameworks/Python.framework/Versions/3.8/lib/python3.8/site-packages/pytorch_lightning/utilities/__init__.py in <module>
16 import numpy
17
---> 18 from pytorch_lightning.utilities.apply_func import move_data_to_device # noqa: F401
19 from pytorch_lightning.utilities.distributed import ( # noqa: F401
20 AllGatherGrad,
/Library/Frameworks/Python.framework/Versions/3.8/lib/python3.8/site-packages/pytorch_lightning/utilities/apply_func.py in <module>
23
24 from pytorch_lightning.utilities.exceptions import MisconfigurationException
---> 25 from pytorch_lightning.utilities.imports import _TORCHTEXT_AVAILABLE
26
27 if _TORCHTEXT_AVAILABLE:
/Library/Frameworks/Python.framework/Versions/3.8/lib/python3.8/site-packages/pytorch_lightning/utilities/imports.py in <module>
54 _TORCH_GREATER_EQUAL_1_7 = _compare_version("torch", operator.ge, "1.7.0")
55 _TORCH_QUANTIZE_AVAILABLE = bool([eg for eg in torch.backends.quantized.supported_engines if eg != 'none'])
---> 56 _APEX_AVAILABLE = _module_available("apex.amp")
57 _BOLTS_AVAILABLE = _module_available('pl_bolts')
58 _DEEPSPEED_AVAILABLE = not _IS_WINDOWS and _module_available('deepspeed')
/Library/Frameworks/Python.framework/Versions/3.8/lib/python3.8/site-packages/pytorch_lightning/utilities/imports.py in _module_available(module_path)
32 """
33 try:
---> 34 return find_spec(module_path) is not None
35 except AttributeError:
36 # Python 3.6
/Library/Frameworks/Python.framework/Versions/3.8/lib/python3.8/importlib/util.py in find_spec(name, package)
92 parent_name = fullname.rpartition('.')[0]
93 if parent_name:
---> 94 parent = __import__(parent_name, fromlist=['__path__'])
95 try:
96 parent_path = parent.__path__
/Library/Frameworks/Python.framework/Versions/3.8/lib/python3.8/site-packages/apex/__init__.py in <module>
11 ISessionFactory)
12 from pyramid.security import NO_PERMISSION_REQUIRED
---> 13 from pyramid.session import UnencryptedCookieSessionFactoryConfig
14 from pyramid.settings import asbool
15
ImportError: cannot import name 'UnencryptedCookieSessionFactoryConfig' from 'pyramid.session' (unknown location)
I guess this is an outdated issue, as we have split TorchMetrics out into a standalone package. Please check out the latest PyTorch Lightning.
Try installing it from the GitHub repository before importing it in the notebook.
Run the following command in the notebook:
!pip install git+https://github.com/PyTorchLightning/pytorch-lightning
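After the install finishes, a quick sanity check in a new cell (just a minimal sketch) is to import the package and print its version:

import pytorch_lightning as pl
print(pl.__version__)  # should print the freshly installed version without raising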
I've been trying to run this TensorFlow tutorial on my computer, but while running the following code I keep getting the error shown below:
import os
import shutil
import tensorflow as tf
import tensorflow_hub as hub
import tensorflow_text as text
from official.nlp import optimization # to create AdamW optimizer
import matplotlib.pyplot as plt
tf.get_logger().setLevel('ERROR')
os.environ['TF_FORCE_GPU_ALLOW_GROWTH'] = 'true'
This is the entire error traceback. When running it in the tutorial's Colab notebook there doesn't seem to be any problem whatsoever.
AttributeError Traceback (most recent call last)
<ipython-input-7-3afaa91eeb1f> in <module>
3
4 import tensorflow as tf
----> 5 import tensorflow_hub as hub
6 import tensorflow_text as text
7 from official.nlp import optimization # to create AdamW optmizer
/usr/local/lib/python3.6/dist-packages/tensorflow_hub/__init__.py in <module>
86
87
---> 88 from tensorflow_hub.estimator import LatestModuleExporter
89 from tensorflow_hub.estimator import register_module_for_export
90 from tensorflow_hub.feature_column import image_embedding_column
/usr/local/lib/python3.6/dist-packages/tensorflow_hub/estimator.py in <module>
60
61
---> 62 class LatestModuleExporter(tf.compat.v1.estimator.Exporter):
63 """Regularly exports registered modules into timestamped directories.
64
/usr/local/lib/python3.6/dist-packages/tensorflow/python/util/lazy_loader.py in __getattr__(self, item)
60
61 def __getattr__(self, item):
---> 62 module = self._load()
63 return getattr(module, item)
64
/usr/local/lib/python3.6/dist-packages/tensorflow/python/util/lazy_loader.py in _load(self)
43 """Load the module and insert it into the parent's globals."""
44 # Import the target module and insert it into the parent's namespace
---> 45 module = importlib.import_module(self.__name__)
46 self._parent_module_globals[self._local_name] = module
47
/usr/lib/python3.6/importlib/__init__.py in import_module(name, package)
124 break
125 level += 1
--> 126 return _bootstrap._gcd_import(name[level:], package, level)
127
128
/usr/local/lib/python3.6/dist-packages/tensorflow_estimator/python/estimator/api/_v1/estimator/__init__.py in <module>
8 import sys as _sys
9
---> 10 from tensorflow_estimator.python.estimator.api._v1.estimator import experimental
11 from tensorflow_estimator.python.estimator.api._v1.estimator import export
12 from tensorflow_estimator.python.estimator.api._v1.estimator import inputs
/usr/local/lib/python3.6/dist-packages/tensorflow_estimator/python/estimator/api/_v1/estimator/experimental/__init__.py in <module>
8 import sys as _sys
9
---> 10 from tensorflow_estimator.python.estimator.canned.dnn import dnn_logit_fn_builder
11 from tensorflow_estimator.python.estimator.canned.kmeans import KMeansClustering as KMeans
12 from tensorflow_estimator.python.estimator.canned.linear import LinearSDCA
/usr/local/lib/python3.6/dist-packages/tensorflow_estimator/python/estimator/canned/dnn.py in <module>
29 from tensorflow.python.keras.utils import losses_utils
30 from tensorflow.python.util.tf_export import estimator_export
---> 31 from tensorflow_estimator.python.estimator import estimator
32 from tensorflow_estimator.python.estimator.canned import head as head_lib
33 from tensorflow_estimator.python.estimator.canned import optimizers
/usr/local/lib/python3.6/dist-packages/tensorflow_estimator/python/estimator/estimator.py in <module>
50 from tensorflow.python.util.tf_export import estimator_export
51 from tensorflow_estimator.python.estimator import model_fn as model_fn_lib
---> 52 from tensorflow_estimator.python.estimator import run_config
53 from tensorflow_estimator.python.estimator import util as estimator_util
54 from tensorflow_estimator.python.estimator.export import export_lib
/usr/local/lib/python3.6/dist-packages/tensorflow_estimator/python/estimator/run_config.py in <module>
28 from tensorflow.core.protobuf import rewriter_config_pb2
29 from tensorflow.python.distribute import estimator_training as distribute_coordinator_training
---> 30 from tensorflow.python.distribute import parameter_server_strategy_v2
31 from tensorflow.python.util import compat_internal
32 from tensorflow.python.util import function_utils
/usr/local/lib/python3.6/dist-packages/tensorflow/python/distribute/parameter_server_strategy_v2.py in <module>
26
27 from tensorflow.python.distribute import distribute_lib
---> 28 from tensorflow.python.distribute import distribute_utils
29 from tensorflow.python.distribute import parameter_server_strategy
30 from tensorflow.python.distribute import sharded_variable
/usr/local/lib/python3.6/dist-packages/tensorflow/python/distribute/distribute_utils.py in <module>
372 # (synchronization=ON_READ, aggregation=NONE,SUM,MEAN,ONLY_FIRST_REPLICA)
373 VARIABLE_POLICY_MAPPING = {
--> 374 vs.VariableSynchronization.AUTO: values_lib.AutoPolicy,
375 vs.VariableSynchronization.ON_WRITE: values_lib.OnWritePolicy,
376 vs.VariableSynchronization.ON_READ: values_lib.OnReadPolicy,
AttributeError: module 'tensorflow.python.distribute.values' has no attribute 'AutoPolicy'
Is there any particular reason why this is happening?
Thank you in advance.
My guess is that this is related to a mismatch between the versions of tensorflow and tensorflow_hub you're using.
It looks like AutoPolicy was removed from TF in a recent commit, so if you built tensorflow from source and included that commit, that could be the issue. If you want to build TF from source, check out the latest official release (2.4.1) before building.
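To rule out a mismatch, you could first print the versions that are actually installed. Here is a small sketch that reads the package metadata instead of re-importing the failing module:

import pkg_resources  # part of setuptools, works on Python 3.6

# report the installed distributions for both packages
for pkg in ("tensorflow", "tensorflow-hub"):
    print(pkg, pkg_resources.get_distribution(pkg).version)

If those versions don't match what the tutorial was written against, reinstalling a matching pair should resolve it.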
My Jupyter notebook had been working fine for running my Python code for some time. However, I recently got interested in style transfer with deep learning, and the code I found for practice required downgrading TensorFlow to a lower version. Fortunately that worked, but unfortunately all the other code I have developed over the past months (which requires the TensorFlow backend) stopped working. I tried JupyterLab; sometimes it works, but most of the time I get the same error as in Jupyter notebook. Right now the only workaround is to move my code to Spyder, which is quite frustrating.
Every time I run my code, I get the following error:
AttributeError Traceback (most recent call last)
<ipython-input-2-6d5587dfcc0c> in <module>
1 # import all libraries
----> 2 import keras
3 from keras.models import Sequential
4 from keras.layers import Dense, Activation, Dropout
5 from keras.layers.convolutional import Conv1D
~\Miniconda3\envs\tensorflow\lib\site-packages\keras\__init__.py in <module>
1 from __future__ import absolute_import
2
----> 3 from . import utils
4 from . import activations
5 from . import applications
~\Miniconda3\envs\tensorflow\lib\site-packages\keras\utils\__init__.py in <module>
4 from . import data_utils
5 from . import io_utils
----> 6 from . import conv_utils
7
8 # Globally-importable utils.
~\Miniconda3\envs\tensorflow\lib\site-packages\keras\utils\conv_utils.py in <module>
7 from six.moves import range
8 import numpy as np
----> 9 from .. import backend as K
10
11
~\Miniconda3\envs\tensorflow\lib\site-packages\keras\backend\__init__.py in <module>
87 elif _BACKEND == 'tensorflow':
88 sys.stderr.write('Using TensorFlow backend.\n')
---> 89 from .tensorflow_backend import *
90 else:
91 # Try and load external backend.
~\Miniconda3\envs\tensorflow\lib\site-packages\keras\backend\tensorflow_backend.py in <module>
3 from __future__ import print_function
4
----> 5 import tensorflow as tf
6 from tensorflow.python.framework import ops as tf_ops
7 from tensorflow.python.training import moving_averages
~\Miniconda3\envs\tensorflow\lib\site-packages\tensorflow\__init__.py in <module>
22
23 # pylint: disable=g-bad-import-order
---> 24 from tensorflow.python import pywrap_tensorflow # pylint: disable=unused-import
25
26 try:
~\Miniconda3\envs\tensorflow\lib\site-packages\tensorflow\python\__init__.py in <module>
86 # Bring in subpackages.
87 from tensorflow.python import data
---> 88 from tensorflow.python import keras
89 from tensorflow.python.feature_column import feature_column_lib as feature_column
90 from tensorflow.python.layers import layers
~\Miniconda3\envs\tensorflow\lib\site-packages\tensorflow\python\keras\__init__.py in <module>
23
24 from tensorflow.python.keras import activations
---> 25 from tensorflow.python.keras import applications
26 from tensorflow.python.keras import backend
27 from tensorflow.python.keras import callbacks
~\Miniconda3\envs\tensorflow\lib\site-packages\tensorflow\python\keras\applications\__init__.py in <module>
23
24 from tensorflow.python.keras import backend
---> 25 from tensorflow.python.keras import engine
26 from tensorflow.python.keras import layers
27 from tensorflow.python.keras import models
~\Miniconda3\envs\tensorflow\lib\site-packages\tensorflow\python\keras\engine\__init__.py in <module>
21 # TODO(fchollet): Remove hourglass imports once external code is done importing
22 # non-public APIs.
---> 23 from tensorflow.python.keras.engine.base_layer import InputSpec
24 from tensorflow.python.keras.engine.base_layer import Layer
25 from tensorflow.python.keras.engine.input_layer import Input
~\Miniconda3\envs\tensorflow\lib\site-packages\tensorflow\python\keras\engine\base_layer.py in <module>
37 from tensorflow.python.keras import initializers
38 from tensorflow.python.keras import regularizers
---> 39 from tensorflow.python.keras.utils import generic_utils
40 from tensorflow.python.keras.utils import tf_utils
41 # A module that only depends on `keras.layers` import these from here.
~\Miniconda3\envs\tensorflow\lib\site-packages\tensorflow\python\keras\utils\__init__.py in <module>
30 from tensorflow.python.keras.utils.generic_utils import Progbar
31 from tensorflow.python.keras.utils.generic_utils import serialize_keras_object
---> 32 from tensorflow.python.keras.utils.io_utils import HDF5Matrix
33 from tensorflow.python.keras.utils.layer_utils import convert_all_kernels_in_model
34 from tensorflow.python.keras.utils.layer_utils import get_source_inputs
~\Miniconda3\envs\tensorflow\lib\site-packages\tensorflow\python\keras\utils\io_utils.py in <module>
27
28 try:
---> 29 import h5py
30 except ImportError:
31 h5py = None
~\Miniconda3\envs\tensorflow\lib\site-packages\h5py\__init__.py in <module>
32 raise
33
---> 34 from . import version
35
36 if version.hdf5_version_tuple != version.hdf5_built_version_tuple:
~\Miniconda3\envs\tensorflow\lib\site-packages\h5py\version.py in <module>
15
16 from collections import namedtuple
---> 17 from . import h5 as _h5
18 import sys
19 import numpy
h5py\h5.pyx in init h5py.h5()
AttributeError: type object 'h5py.h5.H5PYConfig' has no attribute '__reduce_cython__'
How do I rectify this situation?
Many thanks in anticipation of your suggestions.
I installed the most recent version of Anaconda, but "import sklearn" gives an error:
ImportError Traceback (most recent call last)
<ipython-input-5-b7c74cbf5af0> in <module>
----> 1 import sklearn
~\anaconda3\lib\site-packages\sklearn\__init__.py in <module>
62 else:
63 from . import __check_build
---> 64 from .base import clone
65 from .utils._show_versions import show_versions
66
~\anaconda3\lib\site-packages\sklearn\base.py in <module>
11 from scipy import sparse
12 from .externals import six
---> 13 from .utils.fixes import signature
14 from . import __version__
15
~\anaconda3\lib\site-packages\sklearn\utils\__init__.py in <module>
14 from . import _joblib
15 from ..exceptions import DataConversionWarning
---> 16 from .fixes import _Sequence as Sequence
17 from .deprecation import deprecated
18 from .validation import (as_float_array,
~\anaconda3\lib\site-packages\sklearn\utils\fixes.py in <module>
90 from ._scipy_sparse_lsqr_backport import lsqr as sparse_lsqr
91 else:
---> 92 from scipy.sparse.linalg import lsqr as sparse_lsqr # noqa
93
94
~\anaconda3\lib\site-packages\scipy\sparse\linalg\__init__.py in <module>
115 from .dsolve import *
116 from .interface import *
--> 117 from .eigen import *
118 from .matfuncs import *
119 from ._onenormest import *
~\anaconda3\lib\site-packages\scipy\sparse\linalg\eigen\__init__.py in <module>
9 from __future__ import division, print_function, absolute_import
10
---> 11 from .arpack import *
12 from .lobpcg import *
13
~\anaconda3\lib\site-packages\scipy\sparse\linalg\eigen\arpack\__init__.py in <module>
20 from __future__ import division, print_function, absolute_import
21
---> 22 from .arpack import *
~\anaconda3\lib\site-packages\scipy\sparse\linalg\eigen\arpack\arpack.py in <module>
43 __all__ = ['eigs', 'eigsh', 'svds', 'ArpackError', 'ArpackNoConvergence']
44
---> 45 from . import _arpack
46 import numpy as np
47 import warnings
ImportError: DLL load failed: The specified procedure could not be found.
I found out what the problem was. I had created an env file for Jupyter and edited it to open Jupyter in Chrome. After updating Python, import sklearn did not work, so I went back and deleted that env file, and everything worked fine after that. I hope this helps future readers of this post.
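Once the environment is cleaned up, a quick way to confirm the install (a minimal check) is:

import sklearn
sklearn.show_versions()  # prints the scikit-learn, numpy and scipy versions plus system info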
I installed TensorFlow 1.8. When I import keras or import tensorflow, I am getting:
ImportError Traceback (most recent call last)
<ipython-input-2-dc6fd3b71e3e> in <module>
----> 1 from keras.preprocessing.text import Tokenizer
2 from keras.preprocessing.sequence import pad_sequences
3 from keras.models import Sequential
4 from keras.layers import Dense, Flatten, LSTM, Conv1D, MaxPooling1D, Dropout, Activation
5 from keras.layers.embeddings import Embedding
~\Anaconda3\lib\site-packages\keras\__init__.py in <module>
1 from __future__ import absolute_import
2
----> 3 from . import utils
4 from . import activations
5 from . import applications
~\Anaconda3\lib\site-packages\keras\utils\__init__.py in <module>
4 from . import data_utils
5 from . import io_utils
----> 6 from . import conv_utils
7
8 # Globally-importable utils.
~\Anaconda3\lib\site-packages\keras\utils\conv_utils.py in <module>
7 from six.moves import range
8 import numpy as np
----> 9 from .. import backend as K
10
11
~\Anaconda3\lib\site-packages\keras\backend\__init__.py in <module>
87 elif _BACKEND == 'tensorflow':
88 sys.stderr.write('Using TensorFlow backend.\n')
---> 89 from .tensorflow_backend import *
90 else:
91 # Try and load external backend.
~\Anaconda3\lib\site-packages\keras\backend\tensorflow_backend.py in <module>
3 from __future__ import print_function
4
----> 5 import tensorflow as tf
6 from tensorflow.python.framework import ops as tf_ops
7 from tensorflow.python.training import moving_averages
~\Anaconda3\lib\site-packages\tensorflow\__init__.py in <module>
22
23 # pylint: disable=g-bad-import-order
---> 24 from tensorflow.python import pywrap_tensorflow # pylint: disable=unused-import
25
26 try:
~\Anaconda3\lib\site-packages\tensorflow\python\__init__.py in <module>
57
58 # Protocol buffers
---> 59 from tensorflow.core.framework.graph_pb2 import *
60 from tensorflow.core.framework.node_def_pb2 import *
61 from tensorflow.core.framework.summary_pb2 import *
~\Anaconda3\lib\site-packages\tensorflow\core\framework\graph_pb2.py in <module>
4 import sys
5 _b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
----> 6 from google.protobuf import descriptor as _descriptor
7 from google.protobuf import message as _message
8 from google.protobuf import reflection as _reflection
~\Anaconda3\lib\site-packages\google\protobuf\descriptor.py in <module>
45 import binascii
46 import os
---> 47 from google.protobuf.pyext import _message
48 _USE_C_DESCRIPTORS = getattr(_message, '_USE_C_DESCRIPTORS', False)
49
ImportError: DLL load failed: The specified procedure could not be found.
Could someone please help me with this?