JSONDecodeError: Expecting value: line 1 column 1 (char 0)
#20
by
texasdave2
- opened
I'm getting this error when running on-prem. I don't get it with other downloaded datasets — only this one.
Any ideas?
Thanks!
Repo card metadata block was not found. Setting CardData to empty.
---------------------------------------------------------------------------
JSONDecodeError Traceback (most recent call last)
Cell In[14], line 3
1 from datasets import load_dataset
----> 3 dataset = load_dataset("fka/awesome-chatgpt-prompts")
5 #dataset = "fka/awesome-chatgpt-prompts"
6
7 #Create the Dataset to create prompts.
8 #data = load_dataset(dataset)
9
10 #data = load_dataset('nateraw/parti-prompts', split='train')
14 data = data.map(lambda samples: tokenizer(samples["prompt"]), batched=True)
File ~/anaconda3/envs/pytorch/lib/python3.10/site-packages/datasets/load.py:2129, in load_dataset(path, name, data_dir, data_files, split, cache_dir, features, download_config, download_mode, verification_mode, ignore_verifications, keep_in_memory, save_infos, revision, token, use_auth_token, task, streaming, num_proc, storage_options, **config_kwargs)
2124 verification_mode = VerificationMode(
2125 (verification_mode or VerificationMode.BASIC_CHECKS) if not save_infos else VerificationMode.ALL_CHECKS
2126 )
2128 # Create a dataset builder
-> 2129 builder_instance = load_dataset_builder(
2130 path=path,
2131 name=name,
2132 data_dir=data_dir,
2133 data_files=data_files,
2134 cache_dir=cache_dir,
2135 features=features,
2136 download_config=download_config,
2137 download_mode=download_mode,
2138 revision=revision,
2139 token=token,
2140 storage_options=storage_options,
2141 **config_kwargs,
2142 )
2144 # Return iterable dataset in case of streaming
2145 if streaming:
File ~/anaconda3/envs/pytorch/lib/python3.10/site-packages/datasets/load.py:1852, in load_dataset_builder(path, name, data_dir, data_files, cache_dir, features, download_config, download_mode, revision, token, use_auth_token, storage_options, **config_kwargs)
1850 builder_cls = get_dataset_builder_class(dataset_module, dataset_name=dataset_name)
1851 # Instantiate the dataset builder
-> 1852 builder_instance: DatasetBuilder = builder_cls(
1853 cache_dir=cache_dir,
1854 dataset_name=dataset_name,
1855 config_name=config_name,
1856 data_dir=data_dir,
1857 data_files=data_files,
1858 hash=hash,
1859 info=info,
1860 features=features,
1861 token=token,
1862 storage_options=storage_options,
1863 **builder_kwargs,
1864 **config_kwargs,
1865 )
1867 return builder_instance
File ~/anaconda3/envs/pytorch/lib/python3.10/site-packages/datasets/builder.py:420, in DatasetBuilder.__init__(self, cache_dir, dataset_name, config_name, hash, base_path, info, features, token, use_auth_token, repo_id, data_files, data_dir, storage_options, writer_batch_size, name, **config_kwargs)
418 if os.path.exists(path_join(self._cache_dir, config.DATASET_INFO_FILENAME)):
419 logger.info("Overwrite dataset info from restored data version if exists.")
--> 420 self.info = DatasetInfo.from_directory(self._cache_dir)
421 else: # dir exists but no data, remove the empty dir as data aren't available anymore
422 logger.warning(
423 f"Old caching folder {self._cache_dir} for dataset {self.dataset_name} exists but no data were found. Removing it. "
424 )
File ~/anaconda3/envs/pytorch/lib/python3.10/site-packages/datasets/info.py:360, in DatasetInfo.from_directory(cls, dataset_info_dir, fs, storage_options)
357 path_join = os.path.join if is_local else posixpath.join
359 with fs.open(path_join(dataset_info_dir, config.DATASET_INFO_FILENAME), "r", encoding="utf-8") as f:
--> 360 dataset_info_dict = json.load(f)
361 return cls.from_dict(dataset_info_dict)
File ~/anaconda3/envs/pytorch/lib/python3.10/json/__init__.py:293, in load(fp, cls, object_hook, parse_float, parse_int, parse_constant, object_pairs_hook, **kw)
274 def load(fp, *, cls=None, object_hook=None, parse_float=None,
275 parse_int=None, parse_constant=None, object_pairs_hook=None, **kw):
276 """Deserialize ``fp`` (a ``.read()``-supporting file-like object containing
277 a JSON document) to a Python object.
278
(...)
291 kwarg; otherwise ``JSONDecoder`` is used.
292 """
--> 293 return loads(fp.read(),
294 cls=cls, object_hook=object_hook,
295 parse_float=parse_float, parse_int=parse_int,
296 parse_constant=parse_constant, object_pairs_hook=object_pairs_hook, **kw)
File ~/anaconda3/envs/pytorch/lib/python3.10/json/__init__.py:346, in loads(s, cls, object_hook, parse_float, parse_int, parse_constant, object_pairs_hook, **kw)
341 s = s.decode(detect_encoding(s), 'surrogatepass')
343 if (cls is None and object_hook is None and
344 parse_int is None and parse_float is None and
345 parse_constant is None and object_pairs_hook is None and not kw):
--> 346 return _default_decoder.decode(s)
347 if cls is None:
348 cls = JSONDecoder
File ~/anaconda3/envs/pytorch/lib/python3.10/json/decoder.py:337, in JSONDecoder.decode(self, s, _w)
332 def decode(self, s, _w=WHITESPACE.match):
333 """Return the Python representation of ``s`` (a ``str`` instance
334 containing a JSON document).
335
336 """
--> 337 obj, end = self.raw_decode(s, idx=_w(s, 0).end())
338 end = _w(s, end).end()
339 if end != len(s):
File ~/anaconda3/envs/pytorch/lib/python3.10/json/decoder.py:355, in JSONDecoder.raw_decode(self, s, idx)
353 obj, end = self.scan_once(s, idx)
354 except StopIteration as err:
--> 355 raise JSONDecodeError("Expecting value", s, err.value) from None
356 return obj, end
JSONDecodeError: Expecting value: line 1 column 1 (char 0)