ValueError: Not enough values to unpack in tfds.load - python-3.x

I am new to Python and TensorFlow. While executing the tfds.load function, I got the following error. I have spent hours trying to understand it, but I'm at a loss. Any help would be appreciated.
I am using the following versions: Python 3.8, TensorFlow 2.3 and tensorflow-datasets 1.2.
ValueError Traceback (most recent call last)
<ipython-input-2-41baf13b8c3f> in <module>
----> 1 mnistdataset, mnist_info = tfds.load("mnist",
with_info=True, as_supervised=True)
~\anaconda3\envs\py3-TF2.0\lib\site-packages\tensorflow_datasets\core\api_utils.py in
disallow_positional_args_dec(fn, instance, args, kwargs)
50 _check_no_positional(fn, args, ismethod, allowed=allowed)
51 _check_required(fn, kwargs)
---> 52 return fn(*args, **kwargs)
53
54 return disallow_positional_args_dec(wrapped) # pylint: disable=no-value-for-parameter
~\anaconda3\envs\py3-TF2.0\lib\site-packages\tensorflow_datasets\core\registered.py in load(name,
split, data_dir, batch_size, in_memory, shuffle_files, download, as_supervised, decoders, with_info,
builder_kwargs, download_and_prepare_kwargs, as_dataset_kwargs, try_gcs)
298 if download:
299 download_and_prepare_kwargs = download_and_prepare_kwargs or {}
--> 300 dbuilder.download_and_prepare(**download_and_prepare_kwargs)
301
302 if as_dataset_kwargs is None:
~\anaconda3\envs\py3-TF2.0\lib\site-packages\tensorflow_datasets\core\api_utils.py in
disallow_positional_args_dec(fn, instance, args, kwargs)
50 _check_no_positional(fn, args, ismethod, allowed=allowed)
51 _check_required(fn, kwargs)
---> 52 return fn(*args, **kwargs)
53
54 return disallow_positional_args_dec(wrapped) # pylint: disable=no-value-for-parameter
~\anaconda3\envs\py3-TF2.0\lib\site-packages\tensorflow_datasets\core\dataset_builder.py in
download_and_prepare(self, download_dir, download_config)
260 dl_manager = self._make_download_manager(
261 download_dir=download_dir,
--> 262 download_config=download_config)
263
264 # Currently it's not possible to overwrite the data because it would
~\anaconda3\envs\py3-TF2.0\lib\site-packages\tensorflow_datasets\core\dataset_builder.py in
_make_download_manager(self, download_dir, download_config)
660 force_download=(download_config.download_mode == FORCE_REDOWNLOAD),
661 force_extraction=(download_config.download_mode == FORCE_REDOWNLOAD),
--> 662 register_checksums=download_config.register_checksums,
663 )
664
~\anaconda3\envs\py3-TF2.0\lib\site-packages\tensorflow_datasets\core\api_utils.py in
disallow_positional_args_dec(fn, instance, args, kwargs)
50 _check_no_positional(fn, args, ismethod, allowed=allowed)
51 _check_required(fn, kwargs)
---> 52 return fn(*args, **kwargs)
53
54 return disallow_positional_args_dec(wrapped) # pylint: disable=no-value-for-parameter
~\anaconda3\envs\py3-TF2.0\lib\site-packages\tensorflow_datasets\core\download\download_manager.py in
__init__(self, download_dir, extract_dir, manual_dir, dataset_name, force_download, force_extraction,
register_checksums)
175 self._register_checksums = register_checksums
176 # All known URLs: {url: (size, checksum)}
--> 177 self._sizes_checksums = checksums.get_all_sizes_checksums()
178 # To record what is being used: {url: (size, checksum)}
179 self._recorded_sizes_checksums = {}
~\anaconda3\envs\py3-TF2.0\lib\site-packages\tensorflow_datasets\core\download\checksums.py in
get_all_sizes_checksums()
127 sizes_checksums = {}
128 for path in _checksum_paths().values():
--> 129 data = _get_sizes_checksums(path)
130 for url, size_checksum in data.items():
131 if (url in sizes_checksums and
~\anaconda3\envs\py3-TF2.0\lib\site-packages\tensorflow_datasets\core\download\checksums.py in
_get_sizes_checksums(checksums_path)
117 continue
118 # URL might have spaces inside, but size and checksum will not.
--> 119 url, size, checksum = line.rsplit(' ', 2)
120 checksums[url] = (int(size), checksum)
121 return checksums
ValueError: not enough values to unpack (expected 3, got 1)
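The failure comes from the checksums parser at the bottom of the trace: each line of a tfds checksums file is expected to hold three space-separated fields, <url> <size> <checksum>, so a line missing the two trailing fields cannot be unpacked. A minimal sketch of the failure (the line contents below are hypothetical):
# Each checksums line should look like "<url> <size> <checksum>".
good = "http://example.com/data.zip 1024 abc123"  # hypothetical well-formed entry
url, size, checksum = good.rsplit(' ', 2)         # OK: three values

bad = "malformed-entry"                           # hypothetical corrupted entry
url, size, checksum = bad.rsplit(' ', 2)
# ValueError: not enough values to unpack (expected 3, got 1)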

From the comments:
After upgrading tensorflow-datasets from 1.2 to 4.2, the issue was resolved.
(paraphrased from Niteya Shah)

I was also having this issue, and the following solved the problem:
pip install tensorflow-datasets==4.3

I had the same problem; my solution was to create a new environment with just:
conda create --name py3-TF2.0 python=3
conda activate py3-TF2.0
pip install --upgrade pip
pip install tensorflow
pip install --upgrade tensorflow
pip install tensorflow-datasets
pip install ipykernel
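After recreating the environment, a quick sanity check of the installed versions (the thresholds are the ones reported in the answers above, not hard requirements):
import tensorflow as tf
import tensorflow_datasets as tfds

print(tf.__version__)    # the answers above used TF 2.x
print(tfds.__version__)  # 4.2+ reportedly resolves this parsing error

mnist_dataset, mnist_info = tfds.load("mnist", with_info=True, as_supervised=True)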

Related

Huggingface tokenizer not able to load model after upgrading python to 3.10

I just updated Python to version 3.10.8. Note that I use JupyterLab.
I had to re-install a lot of packages, but now I get an error when I try to load the tokenizer of a HuggingFace model.
This is my code:
# Import libraries
from transformers import pipeline, AutoTokenizer
# Define checkpoint
model_checkpoint = 'deepset/xlm-roberta-large-squad2'
# Tokenizer
tokenizer = AutoTokenizer.from_pretrained(model_checkpoint)
Note that the version of transformers is 4.24.0.
This is the error I get:
---------------------------------------------------------------------------
AttributeError Traceback (most recent call last)
Cell In [3], line 2
1 # Tokenizer
----> 2 tokenizer = AutoTokenizer.from_pretrained(model_checkpoint)
File ~/.local/lib/python3.10/site-packages/transformers/models/auto/tokenization_auto.py:637, in AutoTokenizer.from_pretrained(cls, pretrained_model_name_or_path, *inputs, **kwargs)
635 tokenizer_class_py, tokenizer_class_fast = TOKENIZER_MAPPING[type(config)]
636 if tokenizer_class_fast and (use_fast or tokenizer_class_py is None):
--> 637 return tokenizer_class_fast.from_pretrained(pretrained_model_name_or_path, *inputs, **kwargs)
638 else:
639 if tokenizer_class_py is not None:
File ~/.local/lib/python3.10/site-packages/transformers/tokenization_utils_base.py:1777, in PreTrainedTokenizerBase.from_pretrained(cls, pretrained_model_name_or_path, *init_inputs, **kwargs)
1774 else:
1775 logger.info(f"loading file {file_path} from cache at {resolved_vocab_files[file_id]}")
-> 1777 return cls._from_pretrained(
1778 resolved_vocab_files,
1779 pretrained_model_name_or_path,
1780 init_configuration,
1781 *init_inputs,
1782 use_auth_token=use_auth_token,
1783 cache_dir=cache_dir,
1784 local_files_only=local_files_only,
1785 _commit_hash=commit_hash,
1786 **kwargs,
1787 )
File ~/.local/lib/python3.10/site-packages/transformers/tokenization_utils_base.py:1932, in PreTrainedTokenizerBase._from_pretrained(cls, resolved_vocab_files, pretrained_model_name_or_path, init_configuration, use_auth_token, cache_dir, local_files_only, _commit_hash, *init_inputs, **kwargs)
1930 # Instantiate tokenizer.
1931 try:
-> 1932 tokenizer = cls(*init_inputs, **init_kwargs)
1933 except OSError:
1934 raise OSError(
1935 "Unable to load vocabulary from file. "
1936 "Please check that the provided vocabulary is accessible and not corrupted."
1937 )
File ~/.local/lib/python3.10/site-packages/transformers/models/xlm_roberta/tokenization_xlm_roberta_fast.py:155, in XLMRobertaTokenizerFast.__init__(self, vocab_file, tokenizer_file, bos_token, eos_token, sep_token, cls_token, unk_token, pad_token, mask_token, **kwargs)
139 def __init__(
140 self,
141 vocab_file=None,
(...)
151 ):
152 # Mask token behave like a normal word, i.e. include the space before it
153 mask_token = AddedToken(mask_token, lstrip=True, rstrip=False) if isinstance(mask_token, str) else mask_token
--> 155 super().__init__(
156 vocab_file,
157 tokenizer_file=tokenizer_file,
158 bos_token=bos_token,
159 eos_token=eos_token,
160 sep_token=sep_token,
161 cls_token=cls_token,
162 unk_token=unk_token,
163 pad_token=pad_token,
164 mask_token=mask_token,
165 **kwargs,
166 )
168 self.vocab_file = vocab_file
169 self.can_save_slow_tokenizer = False if not self.vocab_file else True
File ~/.local/lib/python3.10/site-packages/transformers/tokenization_utils_fast.py:114, in PreTrainedTokenizerFast.__init__(self, *args, **kwargs)
111 fast_tokenizer = TokenizerFast.from_file(fast_tokenizer_file)
112 elif slow_tokenizer is not None:
113 # We need to convert a slow tokenizer to build the backend
--> 114 fast_tokenizer = convert_slow_tokenizer(slow_tokenizer)
115 elif self.slow_tokenizer_class is not None:
116 # We need to create and convert a slow tokenizer to build the backend
117 slow_tokenizer = self.slow_tokenizer_class(*args, **kwargs)
File ~/.local/lib/python3.10/site-packages/transformers/convert_slow_tokenizer.py:1162, in convert_slow_tokenizer(transformer_tokenizer)
1154 raise ValueError(
1155 f"An instance of tokenizer class {tokenizer_class_name} cannot be converted in a Fast tokenizer instance."
1156 " No converter was found. Currently available slow->fast convertors:"
1157 f" {list(SLOW_TO_FAST_CONVERTERS.keys())}"
1158 )
1160 converter_class = SLOW_TO_FAST_CONVERTERS[tokenizer_class_name]
-> 1162 return converter_class(transformer_tokenizer).converted()
File ~/.local/lib/python3.10/site-packages/transformers/convert_slow_tokenizer.py:438, in SpmConverter.__init__(self, *args)
434 requires_backends(self, "protobuf")
436 super().__init__(*args)
--> 438 from .utils import sentencepiece_model_pb2 as model_pb2
440 m = model_pb2.ModelProto()
441 with open(self.original_tokenizer.vocab_file, "rb") as f:
File ~/.local/lib/python3.10/site-packages/transformers/utils/sentencepiece_model_pb2.py:20
18 from google.protobuf import descriptor as _descriptor
19 from google.protobuf import message as _message
---> 20 from google.protobuf import reflection as _reflection
21 from google.protobuf import symbol_database as _symbol_database
24 # ##protoc_insertion_point(imports)
File /usr/lib/python3/dist-packages/google/protobuf/reflection.py:58
56 from google.protobuf.pyext import cpp_message as message_impl
57 else:
---> 58 from google.protobuf.internal import python_message as message_impl
60 # The type of all Message classes.
61 # Part of the public interface, but normally only used by message factories.
62 GeneratedProtocolMessageType = message_impl.GeneratedProtocolMessageType
File /usr/lib/python3/dist-packages/google/protobuf/internal/python_message.py:69
66 import copyreg as copyreg
68 # We use "as" to avoid name collisions with variables.
---> 69 from google.protobuf.internal import containers
70 from google.protobuf.internal import decoder
71 from google.protobuf.internal import encoder
File /usr/lib/python3/dist-packages/google/protobuf/internal/containers.py:182
177 collections.MutableMapping.register(MutableMapping)
179 else:
180 # In Python 3 we can just use MutableMapping directly, because it defines
181 # __slots__.
--> 182 MutableMapping = collections.MutableMapping
185 class BaseContainer(object):
187 """Base container class."""
AttributeError: module 'collections' has no attribute 'MutableMapping'
I tried several solutions (for example, this and this), but none seem to work.
According to this link, I should change collections.Mapping into collections.abc.Mapping, but I wouldn't know where to do it.
Another possible solution is downgrading Python to 3.9, but I would like to keep that as a last resort.
How can I fix this?
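For background, the failing attribute really was removed in Python 3.10: the abstract base classes such as MutableMapping now live only in collections.abc, which is why older libraries that still reference collections.MutableMapping break. A quick check, as a sketch:
import collections
import collections.abc

print(hasattr(collections, 'MutableMapping'))      # False on Python 3.10+
print(hasattr(collections.abc, 'MutableMapping'))  # True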
It turned out to be a problem related to the protobuf module. I updated it to the latest version to date (which is 4.21.9).
This changed the error to:
TypeError: Descriptors cannot not be created directly.
If this call came from a _pb2.py file, your generated code is out of date and must be regenerated with protoc >= 3.19.0.
If you cannot immediately regenerate your protos, some other possible workarounds are:
1. Downgrade the protobuf package to 3.20.x or lower.
2. Set PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION=python (but this will use pure-Python parsing and will be much slower).
More information: https://developers.google.com/protocol-buffers/docs/news/2022-05-06#python-updates
So I downgraded protobuf to version 3.20.0 and that worked.
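In pip terms, the downgrade is just (assuming pip manages the transformers environment):
pip install protobuf==3.20.0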
For further details, look here.

Downloading "Imdb_reviews" from Tensorflow_datasets: UnicodeDecodeError: 'utf-8' codec can't decode byte 0xd5 in position 30 invalid continuation byte

While downloading the "imdb_reviews" dataset, I ran into the error below:
'utf-8' codec can't decode byte 0xc5 in position 171: invalid continuation byte
import tensorflow_datasets as tfds
datasets, info = tfds.load("imdb_reviews",as_supervised=True, with_info=True)
Downloading and preparing dataset imdb_reviews (80.23 MiB) to C:\Users\desig\tensorflow_datasets\imdb_reviews\plain_text\0.1.0...
Dl Completed...: 0/0 [00:00<?, ? url/s]
Dl Size...: 0/0 [00:00<?, ? MiB/s]
---------------------------------------------------------------------------
UnicodeDecodeError Traceback (most recent call last)
<ipython-input-6-f3ae52bd604b> in <module>
1 import numpy as np
----> 2 datasets, info = tfds.load("imdb_reviews",as_supervised=True, with_info=True)
3
~\anaconda3\lib\site-packages\tensorflow_datasets\core\api_utils.py in disallow_positional_args_dec(fn, instance, args, kwargs)
50 _check_no_positional(fn, args, ismethod, allowed=allowed)
51 _check_required(fn, kwargs)
---> 52 return fn(*args, **kwargs)
53
54 return disallow_positional_args_dec(wrapped) # pylint: disable=no-value-for-parameter
~\anaconda3\lib\site-packages\tensorflow_datasets\core\registered.py in load(name, split, data_dir, batch_size, in_memory, shuffle_files, download, as_supervised, decoders, with_info, builder_kwargs, download_and_prepare_kwargs, as_dataset_kwargs, try_gcs)
298 if download:
299 download_and_prepare_kwargs = download_and_prepare_kwargs or {}
--> 300 dbuilder.download_and_prepare(**download_and_prepare_kwargs)
301
302 if as_dataset_kwargs is None:
~\anaconda3\lib\site-packages\tensorflow_datasets\core\api_utils.py in disallow_positional_args_dec(fn, instance, args, kwargs)
50 _check_no_positional(fn, args, ismethod, allowed=allowed)
51 _check_required(fn, kwargs)
---> 52 return fn(*args, **kwargs)
53
54 return disallow_positional_args_dec(wrapped) # pylint: disable=no-value-for-parameter
~\anaconda3\lib\site-packages\tensorflow_datasets\core\dataset_builder.py in download_and_prepare(self, download_dir, download_config)
305 self.info.size_in_bytes = dl_manager.downloaded_size
306 # Write DatasetInfo to disk, even if we haven't computed the statistics.
--> 307 self.info.write_to_directory(self._data_dir)
308 self._log_download_done()
309
~\anaconda3\lib\contextlib.py in __exit__(self, type, value, traceback)
118 if type is None:
119 try:
--> 120 next(self.gen)
121 except StopIteration:
122 return False
~\anaconda3\lib\site-packages\tensorflow_datasets\core\file_format_adapter.py in incomplete_dir(dirname)
198 try:
199 yield tmp_dir
--> 200 tf.io.gfile.rename(tmp_dir, dirname)
201 finally:
202 if tf.io.gfile.exists(tmp_dir):
~\anaconda3\lib\site-packages\tensorflow\python\lib\io\file_io.py in rename_v2(src, dst, overwrite)
543 errors.OpError: If the operation fails.
544 """
--> 545 _pywrap_file_io.RenameFile(
546 compat.as_bytes(src), compat.as_bytes(dst), overwrite)
547
UnicodeDecodeError: 'utf-8' codec can't decode byte 0xc5 in position 171: invalid continuation byte
Tensorflow version - 2.3.0
numpy version - 1.18.5
python version - 3.8.8
windows10 x64
Does anyone have an idea? Thank you.
My TensorFlow version is 2.4.1 and I solved it by updating tfds to 4.5.2, so updating tfds to a newer version may help.
(As mentioned by 徐奥博)
Try again after upgrading TensorFlow or tensorflow-datasets, as below:
pip install --upgrade tensorflow
pip install --upgrade tensorflow-datasets
import tensorflow_datasets as tfds
datasets, info = tfds.load("imdb_reviews",as_supervised=True, with_info=True)
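If the failed run left a partially prepared copy behind, deleting it before retrying can also help. The path below is taken from the download log above; adjust it for your machine:
# Remove the partially prepared dataset directory left by the failed run.
import shutil
shutil.rmtree(r"C:\Users\desig\tensorflow_datasets\imdb_reviews", ignore_errors=True)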

Error using tfds.load on Tensorflow Dataset

I was wondering whether the TensorFlow 2.2 datasets release has an issue on Windows.
Here is my diagnostic code
import numpy as np
import tensorflow as tf
import tensorflow_hub as hub
import tensorflow_datasets as tfds
print("Version: ", tf.__version__)
print("Eager mode: ", tf.executing_eagerly())
print("Hub version: ", hub.__version__)
print("GPU is", "available" if tf.config.experimental.list_physical_devices("GPU") else "NOT AVAILABLE")
Version: 2.2.0
Eager mode: True
Hub version: 0.8.0
GPU is available
I can load the list of datasets:
tfds.list_builders()
['abstract_reasoning',
'aeslc',
'aflw2k3d',
'amazon_us_reviews',
'anli',
.
.
.
'xnli',
'xsum',
'yelp_polarity_reviews']
However, I am unable to load any dataset:
imdb, info = tfds.load('imdb_reviews', with_info=True, as_supervised=True)
I receive the following errors:
---------------------------------------------------------------------------
UnimplementedError Traceback (most recent call last)
c:\python37\lib\site-packages\tensorflow_datasets\core\utils\py_utils.py in try_reraise(*args, **kwargs)
398 try:
--> 399 yield
400 except Exception: # pylint: disable=broad-except
c:\python37\lib\site-packages\tensorflow_datasets\core\registered.py in builder(name, **builder_init_kwargs)
243 prefix="Failed to construct dataset {}".format(name)):
--> 244 return builder_cls(name)(**builder_kwargs)
245
c:\python37\lib\site-packages\wrapt\wrappers.py in __call__(self, *args, **kwargs)
602 return self._self_wrapper(self.__wrapped__, self._self_instance,
--> 603 args, kwargs)
604
c:\python37\lib\site-packages\tensorflow_datasets\core\api_utils.py in disallow_positional_args_dec(fn, instance, args, kwargs)
68 _check_required(fn, kwargs)
---> 69 return fn(*args, **kwargs)
70
c:\python37\lib\site-packages\tensorflow_datasets\core\dataset_builder.py in __init__(self, data_dir, config, version)
205 else: # Use the code version (do not restore data)
--> 206 self.info.initialize_from_bucket()
207
c:\python37\lib\site-packages\tensorflow_datasets\core\dataset_info.py in initialize_from_bucket(self)
422 tmp_dir = tempfile.mkdtemp("tfds")
--> 423 data_files = gcs_utils.gcs_dataset_info_files(self.full_name)
424 if not data_files:
c:\python37\lib\site-packages\tensorflow_datasets\core\utils\gcs_utils.py in gcs_dataset_info_files(dataset_dir)
69 """Return paths to GCS files in the given dataset directory."""
---> 70 return gcs_listdir(posixpath.join(GCS_DATASET_INFO_DIR, dataset_dir))
71
c:\python37\lib\site-packages\tensorflow_datasets\core\utils\gcs_utils.py in gcs_listdir(dir_name)
62 root_dir = gcs_path(dir_name)
---> 63 if _is_gcs_disabled or not tf.io.gfile.exists(root_dir):
64 return None
c:\python37\lib\site-packages\tensorflow\python\lib\io\file_io.py in file_exists_v2(path)
266 try:
--> 267 _pywrap_file_io.FileExists(compat.as_bytes(path))
268 except errors.NotFoundError:
UnimplementedError: File system scheme 'gs' not implemented (file: 'gs://tfds-data/dataset_info/imdb_reviews/plain_text/1.0.0')
During handling of the above exception, another exception occurred:
TypeError Traceback (most recent call last)
<ipython-input-36-06930b64f980> in <module>
1 #tfds.list_builders()
----> 2 imdb, info = tfds.load('imdb_reviews', with_info=True, as_supervised=True)
c:\python37\lib\site-packages\wrapt\wrappers.py in __call__(self, *args, **kwargs)
562
563 return self._self_wrapper(self.__wrapped__, self._self_instance,
--> 564 args, kwargs)
565
566 class BoundFunctionWrapper(_FunctionWrapperBase):
c:\python37\lib\site-packages\tensorflow_datasets\core\api_utils.py in disallow_positional_args_dec(fn, instance, args, kwargs)
67 _check_no_positional(fn, args, ismethod, allowed=allowed)
68 _check_required(fn, kwargs)
---> 69 return fn(*args, **kwargs)
70
71 return disallow_positional_args_dec(wrapped) # pylint: disable=no-value-for-parameter
c:\python37\lib\site-packages\tensorflow_datasets\core\registered.py in load(name, split, data_dir, batch_size, shuffle_files, download, as_supervised, decoders, read_config, with_info, builder_kwargs, download_and_prepare_kwargs, as_dataset_kwargs, try_gcs)
366 data_dir = constants.DATA_DIR
367
--> 368 dbuilder = builder(name, data_dir=data_dir, **builder_kwargs)
369 if download:
370 download_and_prepare_kwargs = download_and_prepare_kwargs or {}
c:\python37\lib\site-packages\tensorflow_datasets\core\registered.py in builder(name, **builder_init_kwargs)
242 with py_utils.try_reraise(
243 prefix="Failed to construct dataset {}".format(name)):
--> 244 return builder_cls(name)(**builder_kwargs)
245
246
c:\python37\lib\contextlib.py in __exit__(self, type, value, traceback)
128 value = type()
129 try:
--> 130 self.gen.throw(type, value, traceback)
131 except StopIteration as exc:
132 # Suppress StopIteration *unless* it's the same exception that
c:\python37\lib\site-packages\tensorflow_datasets\core\utils\py_utils.py in try_reraise(*args, **kwargs)
399 yield
400 except Exception: # pylint: disable=broad-except
--> 401 reraise(*args, **kwargs)
402
403
c:\python37\lib\site-packages\tensorflow_datasets\core\utils\py_utils.py in reraise(prefix, suffix)
390 suffix = '\n' + suffix if suffix else ''
391 msg = prefix + str(exc_value) + suffix
--> 392 six.reraise(exc_type, exc_type(msg), exc_traceback)
393
394
TypeError: __init__() missing 2 required positional arguments: 'op' and 'message'
Is the library broken? As mentioned, I am on a Windows 10 machine and using JupyterLab.
After I reported the issue on GitHub, the problem was fixed in version 3.2.1.
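Before that fix, a common stopgap (suggested by the gcs_utils code visible in the traceback, which skips the gs:// lookup when _is_gcs_disabled is true) was to disable the GCS check; a sketch, assuming the tfds 3.x module layout:
import tensorflow_datasets as tfds
from tensorflow_datasets.core.utils import gcs_utils

# Skip the gs:// metadata lookup that raised UnimplementedError above.
gcs_utils._is_gcs_disabled = True

imdb, info = tfds.load('imdb_reviews', with_info=True, as_supervised=True)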

Google Colab - tensorflow object detection api - 'function' object has no attribute 'called'

I encountered the following error when trying to run the object detection API's model_builder_test.py.
!apt-get install -y -qq protobuf-compiler python-pil python-lxml
!git clone --quiet https://github.com/tensorflow/models.git
import os
os.chdir('models/research')
!protoc object_detection/protos/*.proto --python_out=.
import sys
sys.path.append('/content/models/research/slim')
%run object_detection/builders/model_builder_test.py
The following error appears after running model_builder_test.py:
.W0220 03:22:35.097244 140099951081344 deprecation.py:323] From /content/models/research/object_detection/anchor_generators/grid_anchor_generator.py:59: to_float (from tensorflow.python.ops.math_ops) is deprecated and will be removed in a future version.
Instructions for updating:
Use tf.cast instead.
..
WARNING: The TensorFlow contrib module will not be included in TensorFlow 2.0.
For more information, please see:
* https://github.com/tensorflow/community/blob/master/rfcs/20180907-contrib-sunset.md
* https://github.com/tensorflow/addons
If you depend on functionality not listed there, please file an issue.
..................s
----------------------------------------------------------------------
Ran 22 tests in 0.203s
OK (skipped=1)
---------------------------------------------------------------------------
AttributeError                            Traceback (most recent call last)
<ipython-input-...> in <module>()
----> 1 get_ipython().magic('run object_detection/builders/model_builder_test.py')
/usr/local/lib/python3.6/dist-packages/IPython/core/interactiveshell.py in magic(self, arg_s)
   2158         magic_name, _, magic_arg_s = arg_s.partition(' ')
   2159         magic_name = magic_name.lstrip(prefilter.ESC_MAGIC)
-> 2160         return self.run_line_magic(magic_name, magic_arg_s)
   2161
   2162
/usr/local/lib/python3.6/dist-packages/IPython/core/interactiveshell.py in run_line_magic(self, magic_name, line)
   2079             kwargs['local_ns'] = sys._getframe(stack_depth).f_locals
   2080         with self.builtin_trap:
-> 2081             result = fn(*args, **kwargs)
   2082         return result
   2083
<decorator-gen-...> in run(self, parameter_s, runner, file_finder)
/usr/local/lib/python3.6/dist-packages/IPython/core/magic.py in <lambda>(f, *a, **k)
    186     # but it's overkill for just that one bit of state.
    187     def magic_deco(arg):
--> 188         call = lambda f, *a, **k: f(*a, **k)
    189
    190     if callable(arg):
/usr/local/lib/python3.6/dist-packages/IPython/core/magics/execution.py in run(self, parameter_s, runner, file_finder)
    740         else:
    741             # regular execution
--> 742             run()
    743
    744         if 'i' in opts:
/usr/local/lib/python3.6/dist-packages/IPython/core/magics/execution.py in run()
    726             def run():
    727                 runner(filename, prog_ns, prog_ns,
--> 728                        exit_ignore=exit_ignore)
    729
    730         if 't' in opts:
/usr/local/lib/python3.6/dist-packages/IPython/core/pylabtools.py in mpl_execfile(fname, *where, **kw)
    175     matplotlib.interactive(is_interactive)
    176     # make rendering call now, if the user tried to do it
--> 177     if plt.draw_if_interactive.called:
    178         plt.draw()
    179     plt.draw_if_interactive.called = False
AttributeError: 'function' object has no attribute 'called'
This is how I overcame the issue:
Install prompt-toolkit version 1.0.15, as explained in the link below:
https://github.com/jupyter/jupyter_console/issues/158
Restart the runtime to activate the package.
Use '!python' instead of '%run'.
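In a Colab cell, those steps look roughly like this (the version pin comes from the linked issue):
!pip install prompt-toolkit==1.0.15
# Restart the runtime, then run the test as a plain subprocess instead of %run:
!python object_detection/builders/model_builder_test.py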

ipywidgets is not supporting in python terminal

I am trying to run the code below from the Python terminal. It works completely fine when I run it in a Jupyter notebook, but in the Ubuntu terminal it throws an error. I even tried running jupyter nbextension enable --py --sys-prefix widgetsnbextension on the terminal, but I still receive the same error.
import matplotlib as mp
from pylab import *
from sklearn import datasets
from ipywidgets import interact, widgets
from IPython.display import display, clear_output

faces = datasets.fetch_olivetti_faces()

class Trainer:
    def __init__(self):
        self.results = {}
        self.imgs = faces.images
        self.index = 0

    def increment_face(self):
        if self.index + 1 >= len(self.imgs):
            return self.index
        else:
            while str(self.index) in self.results:
                print(self.index)
                self.index += 1
            return self.index

    def record_result(self, smile=True):
        self.results[str(self.index)] = smile

trainer = Trainer()
button_smile = widgets.Button(description='smile')
button_no_smile = widgets.Button(description='sad face')

def display_face(face):
    clear_output()
    imshow(face, cmap='gray')
    axis('off')
    show()

def update_smile(b):
    trainer.record_result(smile=True)
    trainer.increment_face()
    display_face(trainer.imgs[trainer.index])

def update_no_smile(b):
    trainer.record_result(smile=False)
    trainer.increment_face()
    display_face(trainer.imgs[trainer.index])

button_no_smile.on_click(update_no_smile)
button_smile.on_click(update_smile)

display(button_smile)
display(button_no_smile)
display_face(trainer.imgs[trainer.index])
This is the traceback I get when running from the Python terminal:
Widget Javascript not detected. It may not be installed properly. Did you enable the widgetsnbextension? If not, then run "jupyter nbextension enable --py --sys-prefix widgetsnbextension"
---------------------------------------------------------------------------
AttributeError Traceback (most recent call last)
/home/harish/anaconda3/envs/facial_env/lib/python3.4/site-packages/IPython/core/formatters.py in __call__(self, obj)
880 method = get_real_method(obj, self.print_method)
881 if method is not None:
--> 882 method()
883 return True
884
/home/harish/anaconda3/envs/facial_env/lib/python3.4/site-packages/ipywidgets/widgets/widget.py in _ipython_display_(self, **kwargs)
480 loud_error('The installed widget Javascript is the wrong version.')
481
--> 482 self._send({"method": "display"})
483 self._handle_displayed(**kwargs)
484
/home/harish/anaconda3/envs/facial_env/lib/python3.4/site-packages/ipywidgets/widgets/widget.py in _send(self, msg, buffers)
485 def _send(self, msg, buffers=None):
486 """Sends a message to the model in the front-end."""
--> 487 self.comm.send(data=msg, buffers=buffers)
488
489
/home/harish/anaconda3/envs/facial_env/lib/python3.4/site-packages/ipykernel/comm/comm.py in send(self, data, metadata, buffers)
119 """Send a message to the frontend-side version of this comm"""
120 self._publish_msg('comm_msg',
--> 121 data=data, metadata=metadata, buffers=buffers,
122 )
123
/home/harish/anaconda3/envs/facial_env/lib/python3.4/site-packages/ipykernel/comm/comm.py in _publish_msg(self, msg_type, data, metadata, buffers, **keys)
64 metadata = {} if metadata is None else metadata
65 content = json_clean(dict(data=data, comm_id=self.comm_id, **keys))
---> 66 self.kernel.session.send(self.kernel.iopub_socket, msg_type,
67 content,
68 metadata=json_clean(metadata),
AttributeError: 'NoneType' object has no attribute 'session'
<ipywidgets.widgets.widget_button.Button at 0x7f0de5ebfb38>
(The same warning and traceback are printed a second time for the second button.)
From user6764549's comments:
The Ubuntu terminal cannot support interactive widgets. You need to run the code in a "browser"-like or GUI-enabled environment that can run JavaScript.
The Jupyter notebook is not the problem; the Ubuntu terminal is. It does not support displaying images or widgets. If you are only trying to avoid using a browser, you can look at the Jupyter Qt console or Spyder.
