TypeError with TensorFlow and the MNIST Dataset

I've been trying to run an MNIST file, but I get a TypeError that I don't know how to fix. I looked at each file listed in the traceback, and the code at the indicated line in each file looks the way it should. I'm not sure whether I should delete those lines or modify them.

Code:

import numpy as np
import tensorflow as tf
import tensorflow_datasets as tfds

mnist_dataset, mnist_info = tfds.load(name='mnist', with_info=True, as_supervised=True)
# tfds.load loads a dataset (or downloads and then loads it the first time you use it)
# in our case, we are interested in MNIST; the name of the dataset is the only mandatory argument
# there are other optional arguments we can specify, which may be useful
# mnist_dataset = tfds.load(name='mnist', as_supervised=True)
# with_info=True also returns a tuple containing information about the version, features, and number of samples
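
For context, this is roughly how I intend to use the returned objects once the load succeeds. It is only a sketch: the 'train'/'test' split names come from the standard MNIST builder, and the scaling step is just my own next step, not part of the failing code.

mnist_train, mnist_test = mnist_dataset['train'], mnist_dataset['test']
num_train_examples = mnist_info.splits['train'].num_examples  # sample count taken from the info object

def scale(image, label):
    # as_supervised=True yields (image, label) pairs;
    # cast the uint8 image to float and normalize pixel values to [0, 1]
    return tf.cast(image, tf.float32) / 255., label

train_data = mnist_train.map(scale)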

Error:

TypeError                                 Traceback (most recent call last)
C:\Users\Roshinator\AppData\Local\Temp\ipykernel_15208\1759360522.py in <module>
      8 # there are other arguments we can specify, which we can find useful
      9 # mnist_dataset = tfds.load(name='mnist', as_supervised=True)
---> 10 mnist_dataset, mnist_info = tfds.load(name='mnist', with_info=True, as_supervised=True)
     11 # with_info=True will also provide us with a tuple containing information about the version, features, number of samples
     12 # we will use this information a bit below and we will store it in mnist_info

C:\Users\Roshinator\anaconda3\envs\py3-3TF-2.0\lib\site-packages\tensorflow_datasets\core\logging\__init__.py in __call__(self, function, instance, args, kwargs)
    167     metadata = self._start_call()
    168     try:
--> 169       return function(*args, **kwargs)
    170     except Exception:
    171       metadata.mark_error()

C:\Users\Roshinator\anaconda3\envs\py3-3TF-2.0\lib\site-packages\tensorflow_datasets\core\load.py in load(name, split, data_dir, batch_size, shuffle_files, download, as_supervised, decoders, read_config, with_info, builder_kwargs, download_and_prepare_kwargs, as_dataset_kwargs, try_gcs)
    615   if download:
    616     download_and_prepare_kwargs = download_and_prepare_kwargs or {}
--> 617     dbuilder.download_and_prepare(**download_and_prepare_kwargs)
    618 
    619   if as_dataset_kwargs is None:

C:\Users\Roshinator\anaconda3\envs\py3-3TF-2.0\lib\site-packages\tensorflow_datasets\core\logging\__init__.py in __call__(self, function, instance, args, kwargs)
    167     metadata = self._start_call()
    168     try:
--> 169       return function(*args, **kwargs)
    170     except Exception:
    171       metadata.mark_error()

C:\Users\Roshinator\anaconda3\envs\py3-3TF-2.0\lib\site-packages\tensorflow_datasets\core\dataset_builder.py in download_and_prepare(self, download_dir, download_config, file_format)
    628           self._download_and_prepare(
    629               dl_manager=dl_manager,
--> 630               download_config=download_config,
    631           )
    632 

C:\Users\Roshinator\anaconda3\envs\py3-3TF-2.0\lib\site-packages\tensorflow_datasets\core\dataset_builder.py in _download_and_prepare(self, dl_manager, download_config)
   1475             generator=generator,
   1476             filename_template=filename_template,
-> 1477             disable_shuffling=self.info.disable_shuffling,
   1478         )
   1479         split_info_futures.append(future)

C:\Users\Roshinator\anaconda3\envs\py3-3TF-2.0\lib\site-packages\tensorflow_datasets\core\split_builder.py in submit_split_generation(self, split_name, generator, filename_template, disable_shuffling)
    338     # `_build_from_xyz` method.
    339     if isinstance(generator, collections.abc.Iterable):
--> 340       return self._build_from_generator(**build_kwargs)
    341     else:  # Otherwise, beam required
    342       unknown_generator_type = TypeError(

C:\Users\Roshinator\anaconda3\envs\py3-3TF-2.0\lib\site-packages\tensorflow_datasets\core\split_builder.py in _build_from_generator(self, split_name, generator, filename_template, disable_shuffling)
    409         unit=' examples',
    410         total=total_num_examples,
--> 411         leave=False,
    412     ):
    413       try:

C:\Users\Roshinator\anaconda3\envs\py3-3TF-2.0\lib\site-packages\tqdm\notebook.py in __iter__(self)
    248         try:
    249             it = super().__iter__()
--> 250             for obj in it:
    251                 # return super(tqdm...) will not catch exception
    252                 yield obj

C:\Users\Roshinator\anaconda3\envs\py3-3TF-2.0\lib\site-packages\tqdm\std.py in __iter__(self)
   1179 
   1180         try:
-> 1181             for obj in iterable:
   1182                 yield obj
   1183                 # Update and possibly print the progressbar.

C:\Users\Roshinator\anaconda3\envs\py3-3TF-2.0\lib\site-packages\tensorflow_datasets\image_classification\mnist.py in _generate_examples(self, num_examples, data_path, label_path)
    153       Generator yielding the next examples
    154     """
--> 155     images = _extract_mnist_images(data_path, num_examples)
    156     labels = _extract_mnist_labels(label_path, num_examples)
    157     data = list(zip(images, labels))

C:\Users\Roshinator\anaconda3\envs\py3-3TF-2.0\lib\site-packages\tensorflow_datasets\image_classification\mnist.py in _extract_mnist_images(image_filepath, num_images)
    378 def _extract_mnist_images(image_filepath, num_images):
    379   with tf.io.gfile.GFile(image_filepath, "rb") as f:
--> 380     f.read(16)  # header
    381     buf = f.read(_MNIST_IMAGE_SIZE * _MNIST_IMAGE_SIZE * num_images)
    382     data = np.frombuffer(

C:\Users\Roshinator\anaconda3\envs\py3-3TF-2.0\lib\site-packages\tensorflow_core\python\lib\io\file_io.py in read(self, n)
    120       string if in string (regular) mode.
    121     """
--> 122     self._preread_check()
    123     if n == -1:
    124       length = self.size() - self.tell()

C:\Users\Roshinator\anaconda3\envs\py3-3TF-2.0\lib\site-packages\tensorflow_core\python\lib\io\file_io.py in _preread_check(self)
     82                                            "File isn't open for reading")
     83       self._read_buf = pywrap_tensorflow.CreateBufferedInputStream(
---> 84           compat.as_bytes(self.__name), 1024 * 512)
     85 
     86   def _prewrite_check(self):

C:\Users\Roshinator\anaconda3\envs\py3-3TF-2.0\lib\site-packages\tensorflow_core\python\util\compat.py in as_bytes(bytes_or_text, encoding)
     85   else:
     86     raise TypeError('Expected binary or unicode string, got %r' %
---> 87                     (bytes_or_text,))
     88 
     89 

TypeError: Expected binary or unicode string, got WindowsGPath('C:\\SPB_DATA\\tensorflow_datasets\\downloads\\extracted\\GZIP.cvdf-datasets_mnist_train-images-idx3-ubyteRA_Kv3PMVG-iFHXoHqNwJlYF9WviEKQCTSyo8gNSNgk.gz')