Cannot load lvis via tfds

I am trying to load the built-in dataset lvis. It turns out that tfds and lvis need to be installed and imported, respectively. However, I have tried everything I could think of, and it still does not work.

import os
import tensorflow as tf

from matplotlib import pyplot as plt
%matplotlib inline

!pip install lvis
!pip install tfds-nightly
import tensorflow_datasets as tfds

train_data, info = tfds.load('lvis', split='train', as_supervised=True, with_info=True)
validation_data = tfds.load('lvis', split='validation', as_supervised=True)
test_data = tfds.load('lvis', split='test', as_supervised=True)

Running the above code in Colab produces the following error output.

NotFoundError                             Traceback (most recent call last)
/usr/local/lib/python3.7/dist-packages/tensorflow_datasets/core/utils/py_utils.py in try_reraise(*args, **kwargs)
    391   try:
--> 392     yield
    393   except Exception as e:  # pylint: disable=broad-except

15 frames
/usr/local/lib/python3.7/dist-packages/tensorflow_datasets/core/load.py in builder(name, try_gcs, **builder_kwargs)
    167     with py_utils.try_reraise(prefix=f'Failed to construct dataset {name}: '):
--> 168       return cls(**builder_kwargs)  # pytype: disable=not-instantiable
    169 

/usr/local/lib/python3.7/dist-packages/tensorflow_datasets/core/dataset_builder.py in __init__(self, file_format, **kwargs)
    917     """
--> 918     super().__init__(**kwargs)
    919     self.info.set_file_format(file_format)

/usr/local/lib/python3.7/dist-packages/tensorflow_datasets/core/dataset_builder.py in __init__(self, data_dir, config, version)
    184     else:  # Use the code version (do not restore data)
--> 185       self.info.initialize_from_bucket()
    186 

/usr/local/lib/python3.7/dist-packages/tensorflow_datasets/core/utils/py_utils.py in __get__(self, obj, objtype)
    145     if cached is None:
--> 146       cached = self.fget(obj)  # pytype: disable=attribute-error
    147       setattr(obj, attr, cached)

/usr/local/lib/python3.7/dist-packages/tensorflow_datasets/core/dataset_builder.py in info(self)
    328           "the restored dataset.")
--> 329     info = self._info()
    330     if not isinstance(info, dataset_info.DatasetInfo):

/usr/local/lib/python3.7/dist-packages/tensorflow_datasets/object_detection/lvis/lvis.py in _info(self)
     94         names_file=tfds.core.tfds_path(
---> 95             'object_detection/lvis/lvis_classes.txt'))
     96     return tfds.core.DatasetInfo(

/usr/local/lib/python3.7/dist-packages/tensorflow_datasets/core/features/class_label_feature.py in __init__(self, num_classes, names, names_file)
     67     else:
---> 68       self.names = _load_names_from_file(names_file)
     69 

/usr/local/lib/python3.7/dist-packages/tensorflow_datasets/core/features/class_label_feature.py in _load_names_from_file(names_filepath)
    198         name.strip()
--> 199         for name in tf.compat.as_text(f.read()).split("\n")
    200         if name.strip()  # Filter empty names

/usr/local/lib/python3.7/dist-packages/tensorflow/python/lib/io/file_io.py in read(self, n)
    116     """
--> 117     self._preread_check()
    118     if n == -1:

/usr/local/lib/python3.7/dist-packages/tensorflow/python/lib/io/file_io.py in _preread_check(self)
     79       self._read_buf = _pywrap_file_io.BufferedInputStream(
---> 80           compat.path_to_str(self.__name), 1024 * 512)
     81 

NotFoundError: /usr/local/lib/python3.7/dist-packages/tensorflow_datasets/object_detection/lvis/lvis_classes.txt; No such file or directory

The above exception was the direct cause of the following exception:

RuntimeError                              Traceback (most recent call last)
<ipython-input-4-b8c819fe5c62> in <module>()
----> 1 train_data, info = tfds.load('lvis', split='train', as_supervised=True, with_info=True)
      2 validation_data = tfds.load('lvis', split='validation', as_supervised=True)
      3 test_data = tfds.load('lvis', split='test', as_supervised=True)

/usr/local/lib/python3.7/dist-packages/tensorflow_datasets/core/load.py in load(name, split, data_dir, batch_size, shuffle_files, download, as_supervised, decoders, read_config, with_info, builder_kwargs, download_and_prepare_kwargs, as_dataset_kwargs, try_gcs)
    315     builder_kwargs = {}
    316 
--> 317   dbuilder = builder(name, data_dir=data_dir, try_gcs=try_gcs, **builder_kwargs)
    318   if download:
    319     download_and_prepare_kwargs = download_and_prepare_kwargs or {}

/usr/local/lib/python3.7/dist-packages/tensorflow_datasets/core/load.py in builder(name, try_gcs, **builder_kwargs)
    166   if cls:
    167     with py_utils.try_reraise(prefix=f'Failed to construct dataset {name}: '):
--> 168       return cls(**builder_kwargs)  # pytype: disable=not-instantiable
    169 
    170   # If neither the code nor the files are found, raise DatasetNotFoundError

/usr/lib/python3.7/contextlib.py in __exit__(self, type, value, traceback)
    128                 value = type()
    129             try:
--> 130                 self.gen.throw(type, value, traceback)
    131             except StopIteration as exc:
    132                 # Suppress StopIteration *unless* it's the same exception that

/usr/local/lib/python3.7/dist-packages/tensorflow_datasets/core/utils/py_utils.py in try_reraise(*args, **kwargs)
    392     yield
    393   except Exception as e:  # pylint: disable=broad-except
--> 394     reraise(e, *args, **kwargs)
    395 
    396 

/usr/local/lib/python3.7/dist-packages/tensorflow_datasets/core/utils/py_utils.py in reraise(e, prefix, suffix)
    359     else:
    360       exception = RuntimeError(f'{type(e).__name__}: {msg}')
--> 361     raise exception from e
    362   # Otherwise, modify the exception in-place
    363   elif len(e.args) <= 1:

RuntimeError: NotFoundError: Failed to construct dataset lvis: /usr/local/lib/python3.7/dist-packages/tensorflow_datasets/object_detection/lvis/lvis_classes.txt; No such file or directory

Please refer to the workaround discussed here.