Hi everyone, I'm hitting an error with the tensorflow_datasets `tfds.load()` function.
I don't know how to solve this error in a Jupyter notebook — the same code works fine in Colab!
Any help???
Here is the code:
# Import TFDS under its conventional alias and report the installed version
# (useful for diagnosing environment differences, e.g. Jupyter vs Colab).
import tensorflow_datasets as tfds
print(tfds.__version__)
tfds version: 4.9.3
# Three non-overlapping slices of the single "train" split:
# 80% train / 10% validation / 10% test.
splits = ['train[:80%]', 'train[80%:90%]', 'train[90%:]']

# Load Cats-vs-Dogs as (image, label) pairs together with its DatasetInfo.
# NOTE(review): the quoted traceback shows a RecursionError bouncing between
# etils `epath.Path.is_relative_to` and Python 3.12's `pathlib.relative_to`
# during the download step — that is an etils/Python-3.12 incompatibility,
# not a problem in this snippet; upgrading `etils` (and/or
# `tensorflow_datasets`) should resolve it — verify against release notes.
splits, info = tfds.load('cats_vs_dogs', split=splits, with_info=True, as_supervised=True)

train_examples, validation_examples, test_examples = splits

# Dataset-level metadata pulled from the DatasetInfo object.
num_examples = info.splits['train'].num_examples
num_classes = info.features['label'].num_classes
The error is as follows:
---------------------------------------------------------------------------
RecursionError Traceback (most recent call last)
Cell In[6], line 6
3 splits = ['train[:80%]', 'train[80%:90%]', 'train[90%:]']
5 # load the dataset given the splits defined above
----> 6 splits, info = tfds.load('cats_vs_dogs', with_info=True, as_supervised=True, split = splits)
8 (train_examples, validation_examples, test_examples) = splits
10 num_examples = info.splits['train'].num_examples
File /Library/Frameworks/Python.framework/Versions/3.12/lib/python3.12/site-packages/tensorflow_datasets/core/logging/__init__.py:168, in _FunctionDecorator.__call__(self, function, instance, args, kwargs)
166 metadata = self._start_call()
167 try:
--> 168 return function(*args, **kwargs)
169 except Exception:
170 metadata.mark_error()
File /Library/Frameworks/Python.framework/Versions/3.12/lib/python3.12/site-packages/tensorflow_datasets/core/load.py:649, in load(name, split, data_dir, batch_size, shuffle_files, download, as_supervised, decoders, read_config, with_info, builder_kwargs, download_and_prepare_kwargs, as_dataset_kwargs, try_gcs)
530 """Loads the named dataset into a `tf.data.Dataset`.
531
532 `tfds.load` is a convenience method that:
(...)
641 Split-specific information is available in `ds_info.splits`.
642 """
643 dbuilder = _fetch_builder(
644 name,
645 data_dir,
646 builder_kwargs,
647 try_gcs,
648 )
--> 649 _download_and_prepare_builder(dbuilder, download, download_and_prepare_kwargs)
651 if as_dataset_kwargs is None:
652 as_dataset_kwargs = {}
File /Library/Frameworks/Python.framework/Versions/3.12/lib/python3.12/site-packages/tensorflow_datasets/core/load.py:508, in _download_and_prepare_builder(dbuilder, download, download_and_prepare_kwargs)
506 if download:
507 download_and_prepare_kwargs = download_and_prepare_kwargs or {}
--> 508 dbuilder.download_and_prepare(**download_and_prepare_kwargs)
File /Library/Frameworks/Python.framework/Versions/3.12/lib/python3.12/site-packages/tensorflow_datasets/core/logging/__init__.py:168, in _FunctionDecorator.__call__(self, function, instance, args, kwargs)
166 metadata = self._start_call()
167 try:
--> 168 return function(*args, **kwargs)
169 except Exception:
170 metadata.mark_error()
File /Library/Frameworks/Python.framework/Versions/3.12/lib/python3.12/site-packages/tensorflow_datasets/core/dataset_builder.py:691, in DatasetBuilder.download_and_prepare(self, download_dir, download_config, file_format)
689 self.info.read_from_directory(self.data_dir)
690 else:
--> 691 self._download_and_prepare(
692 dl_manager=dl_manager,
693 download_config=download_config,
694 )
696 # NOTE: If modifying the lines below to put additional information in
697 # DatasetInfo, you'll likely also want to update
698 # DatasetInfo.read_from_directory to possibly restore these attributes
699 # when reading from package data.
700 self.info.download_size = dl_manager.downloaded_size
File /Library/Frameworks/Python.framework/Versions/3.12/lib/python3.12/site-packages/tensorflow_datasets/core/dataset_builder.py:1547, in GeneratorBasedBuilder._download_and_prepare(self, dl_manager, download_config)
1545 else:
1546 optional_pipeline_kwargs = {}
-> 1547 split_generators = self._split_generators( # pylint: disable=unexpected-keyword-arg
1548 dl_manager, **optional_pipeline_kwargs
1549 )
1550 # TODO(tfds): Could be removed once all datasets are migrated.
1551 # https://github.com/tensorflow/datasets/issues/2537
1552 # Legacy mode (eventually convert list[SplitGeneratorLegacy] -> dict)
1553 split_generators = split_builder.normalize_legacy_split_generators(
1554 split_generators=split_generators,
1555 generator_fn=self._generate_examples,
1556 is_beam=isinstance(self, BeamBasedBuilder),
1557 )
File /Library/Frameworks/Python.framework/Versions/3.12/lib/python3.12/site-packages/tensorflow_datasets/image_classification/cats_vs_dogs.py:81, in CatsVsDogs._split_generators(self, dl_manager)
80 def _split_generators(self, dl_manager):
---> 81 path = dl_manager.download(_URL)
83 # There is no predefined train/val/test split for this dataset.
84 return [
85 tfds.core.SplitGenerator(
86 name=tfds.Split.TRAIN,
(...)
90 ),
91 ]
File /Library/Frameworks/Python.framework/Versions/3.12/lib/python3.12/site-packages/tensorflow_datasets/core/download/download_manager.py:601, in DownloadManager.download(self, url_or_urls)
599 # Add progress bar to follow the download state
600 with self._downloader.tqdm():
--> 601 return _map_promise(self._download, url_or_urls)
File /Library/Frameworks/Python.framework/Versions/3.12/lib/python3.12/site-packages/tensorflow_datasets/core/download/download_manager.py:831, in _map_promise(map_fn, all_inputs)
827 """Map the function into each element and resolve the promise."""
828 all_promises = tree_utils.map_structure(
829 map_fn, all_inputs
830 ) # Apply the function
--> 831 res = tree_utils.map_structure(
832 lambda p: p.get(), all_promises
833 ) # Wait promises
834 return res
File /Library/Frameworks/Python.framework/Versions/3.12/lib/python3.12/site-packages/tree/__init__.py:428, in map_structure(func, *structures, **kwargs)
425 for other in structures[1:]:
426 assert_same_structure(structures[0], other, check_types=check_types)
427 return unflatten_as(structures[0],
--> 428 [func(*args) for args in zip(*map(flatten, structures))])
File /Library/Frameworks/Python.framework/Versions/3.12/lib/python3.12/site-packages/tensorflow_datasets/core/download/download_manager.py:832, in _map_promise.<locals>.<lambda>(p)
827 """Map the function into each element and resolve the promise."""
828 all_promises = tree_utils.map_structure(
829 map_fn, all_inputs
830 ) # Apply the function
831 res = tree_utils.map_structure(
--> 832 lambda p: p.get(), all_promises
833 ) # Wait promises
834 return res
File /Library/Frameworks/Python.framework/Versions/3.12/lib/python3.12/site-packages/promise/promise.py:512, in Promise.get(self, timeout)
510 target = self._target()
511 self._wait(timeout or DEFAULT_TIMEOUT)
--> 512 return self._target_settled_value(_raise=True)
File /Library/Frameworks/Python.framework/Versions/3.12/lib/python3.12/site-packages/promise/promise.py:516, in Promise._target_settled_value(self, _raise)
514 def _target_settled_value(self, _raise=False):
515 # type: (bool) -> Any
--> 516 return self._target()._settled_value(_raise)
File /Library/Frameworks/Python.framework/Versions/3.12/lib/python3.12/site-packages/promise/promise.py:226, in Promise._settled_value(self, _raise)
224 if _raise:
225 raise_val = self._fulfillment_handler0
--> 226 reraise(type(raise_val), raise_val, self._traceback)
227 return self._fulfillment_handler0
File /Library/Frameworks/Python.framework/Versions/3.12/lib/python3.12/site-packages/six.py:719, in reraise(tp, value, tb)
717 if value.__traceback__ is not tb:
718 raise value.with_traceback(tb)
--> 719 raise value
720 finally:
721 value = None
File /Library/Frameworks/Python.framework/Versions/3.12/lib/python3.12/site-packages/promise/promise.py:87, in try_catch(handler, *args, **kwargs)
84 def try_catch(handler, *args, **kwargs):
85 # type: (Callable, Any, Any) -> Union[Tuple[Any, None], Tuple[None, Tuple[Exception, Optional[TracebackType]]]]
86 try:
---> 87 return (handler(*args, **kwargs), None)
88 except Exception as e:
89 tb = exc_info()[2]
File /Library/Frameworks/Python.framework/Versions/3.12/lib/python3.12/site-packages/tensorflow_datasets/core/download/download_manager.py:408, in DownloadManager._download.<locals>.<lambda>(dl_result)
402 future = self._downloader.download(
403 url, download_tmp_dir, verify=self._verify_ssl
404 )
406 # Post-process the result
407 return future.then(
--> 408 lambda dl_result: self._register_or_validate_checksums( # pylint: disable=g-long-lambda
409 url=url,
410 path=dl_result.path,
411 computed_url_info=dl_result.url_info,
412 expected_url_info=expected_url_info,
413 checksum_path=checksum_path,
414 url_path=url_path,
415 )
416 )
File /Library/Frameworks/Python.framework/Versions/3.12/lib/python3.12/site-packages/tensorflow_datasets/core/download/download_manager.py:473, in DownloadManager._register_or_validate_checksums(self, path, url, expected_url_info, computed_url_info, checksum_path, url_path)
455 else:
456 # Eventually validate checksums
457 # Note:
(...)
463 # download). This is expected as it might mean the downloaded file
464 # was corrupted. Note: The tmp file isn't deleted to allow inspection.
465 _validate_checksums(
466 url=url,
467 path=path,
(...)
470 force_checksums_validation=self._force_checksums_validation,
471 )
--> 473 return self._rename_and_get_final_dl_path(
474 url=url,
475 path=path,
476 expected_url_info=expected_url_info,
477 computed_url_info=computed_url_info,
478 checksum_path=checksum_path,
479 url_path=url_path,
480 )
File /Library/Frameworks/Python.framework/Versions/3.12/lib/python3.12/site-packages/tensorflow_datasets/core/download/download_manager.py:497, in DownloadManager._rename_and_get_final_dl_path(self, url, path, expected_url_info, computed_url_info, checksum_path, url_path)
491 """Eventually rename the downloaded file if checksums were recorded."""
492 # `path` can be:
493 # * Manually downloaded
494 # * (cached) checksum_path
495 # * (cached) url_path
496 # * `tmp_dir/file` (downloaded path)
--> 497 if self._manual_dir and path.is_relative_to(self._manual_dir):
498 return path # Manually downloaded data
499 elif path == checksum_path: # Path already at final destination
File /Library/Frameworks/Python.framework/Versions/3.12/lib/python3.12/site-packages/etils/epath/abstract_path.py:78, in Path.is_relative_to(self, *other)
76 """Return True if the path is relative to another path or False."""
77 try:
---> 78 self.relative_to(*other)
79 return True
80 except ValueError:
File /Library/Frameworks/Python.framework/Versions/3.12/lib/python3.12/pathlib.py:680, in PurePath.relative_to(self, other, walk_up, *_deprecated)
678 other = self.with_segments(other, *_deprecated)
679 for step, path in enumerate([other] + list(other.parents)):
--> 680 if self.is_relative_to(path):
681 break
682 elif not walk_up:
File /Library/Frameworks/Python.framework/Versions/3.12/lib/python3.12/site-packages/etils/epath/abstract_path.py:78, in Path.is_relative_to(self, *other)
76 """Return True if the path is relative to another path or False."""
77 try:
---> 78 self.relative_to(*other)
79 return True
80 except ValueError:
File /Library/Frameworks/Python.framework/Versions/3.12/lib/python3.12/pathlib.py:680, in PurePath.relative_to(self, other, walk_up, *_deprecated)
678 other = self.with_segments(other, *_deprecated)
679 for step, path in enumerate([other] + list(other.parents)):
--> 680 if self.is_relative_to(path):
681 break
682 elif not walk_up:
[... skipping similar frames: Path.is_relative_to at line 78 (741 times), PurePath.relative_to at line 680 (740 times)]
File /Library/Frameworks/Python.framework/Versions/3.12/lib/python3.12/pathlib.py:680, in PurePath.relative_to(self, other, walk_up, *_deprecated)
678 other = self.with_segments(other, *_deprecated)
679 for step, path in enumerate([other] + list(other.parents)):
--> 680 if self.is_relative_to(path):
681 break
682 elif not walk_up:
File /Library/Frameworks/Python.framework/Versions/3.12/lib/python3.12/site-packages/etils/epath/abstract_path.py:78, in Path.is_relative_to(self, *other)
76 """Return True if the path is relative to another path or False."""
77 try:
---> 78 self.relative_to(*other)
79 return True
80 except ValueError:
File /Library/Frameworks/Python.framework/Versions/3.12/lib/python3.12/pathlib.py:678, in PurePath.relative_to(self, other, walk_up, *_deprecated)
673 msg = ("support for supplying more than one positional argument "
674 "to pathlib.PurePath.relative_to() is deprecated and "
675 "scheduled for removal in Python {remove}")
676 warnings._deprecated("pathlib.PurePath.relative_to(*args)", msg,
677 remove=(3, 14))
--> 678 other = self.with_segments(other, *_deprecated)
679 for step, path in enumerate([other] + list(other.parents)):
680 if self.is_relative_to(path):
File /Library/Frameworks/Python.framework/Versions/3.12/lib/python3.12/pathlib.py:385, in PurePath.with_segments(self, *pathsegments)
380 def with_segments(self, *pathsegments):
381 """Construct a new path object from any number of path-like objects.
382 Subclasses may override this method to customize how new path objects
383 are created from methods like `iterdir()`.
384 """
--> 385 return type(self)(*pathsegments)
File /Library/Frameworks/Python.framework/Versions/3.12/lib/python3.12/site-packages/etils/epath/gpath.py:81, in _GPath.__new__(cls, *parts)
80 def __new__(cls: Type[_P], *parts: PathLike) -> _P:
---> 81 full_path = '/'.join(os.fspath(p) for p in parts)
82 if full_path.startswith(_URI_PREFIXES):
83 prefix, _ = full_path.split('://', maxsplit=1)
File /Library/Frameworks/Python.framework/Versions/3.12/lib/python3.12/site-packages/etils/epath/gpath.py:81, in <genexpr>(.0)
80 def __new__(cls: Type[_P], *parts: PathLike) -> _P:
---> 81 full_path = '/'.join(os.fspath(p) for p in parts)
82 if full_path.startswith(_URI_PREFIXES):
83 prefix, _ = full_path.split('://', maxsplit=1)
File /Library/Frameworks/Python.framework/Versions/3.12/lib/python3.12/site-packages/etils/epath/gpath.py:133, in _GPath.__fspath__(self)
132 def __fspath__(self) -> str:
--> 133 return self._path_str
File /Library/Frameworks/Python.framework/Versions/3.12/lib/python3.12/site-packages/etils/epath/gpath.py:130, in _GPath._path_str(self)
128 return self._PATH.join(f'{uri_scheme}://', *self.parts[2:])
129 else:
--> 130 return self._PATH.join(*self.parts) if self.parts else '.'
File <frozen posixpath>:82, in join(a, *p)
RecursionError: maximum recursion depth exceeded while calling a Python object