You signed in with another tab or window. Reload to refresh your session. You signed out in another tab or window. Reload to refresh your session. You switched accounts on another tab or window. Reload to refresh your session. Dismiss alert
I am trying to run the notebook as-is and am getting the error mentioned below:
Traceback (most recent call last):
  File "train.py", line 45, in <module>
    train_dataset = load_from_disk(args.training_dir)
  File "/opt/conda/lib/python3.6/site-packages/datasets/load.py", line 797, in load_from_disk
    return Dataset.load_from_disk(dataset_path, fs, keep_in_memory=keep_in_memory)
  File "/opt/conda/lib/python3.6/site-packages/datasets/arrow_dataset.py", line 665, in load_from_disk
    dataset_info = DatasetInfo.from_dict(json.load(dataset_info_file))
  File "/opt/conda/lib/python3.6/site-packages/datasets/info.py", line 225, in from_dict
    return cls(**{k: v for k, v in dataset_info_dict.items() if k in field_names})
  File "<string>", line 18, in __init__
  File "/opt/conda/lib/python3.6/site-packages/datasets/info.py", line 137, in __post_init__
    self.features = Features.from_dict(self.features)
  File "/opt/conda/lib/python3.6/site-packages/datasets/features.py", line 947, in from_dict
    obj = generate_from_dict(dic)
  File "/opt/conda/lib/python3.6/site-packages/datasets/features.py", line 895, in generate_from_dict
    return {key: generate_from_dict(value) for key, value in obj.items()}
  File "/opt/conda/lib/python3.6/site-packages/datasets/features.py", line 895, in <dictcomp>
    return {key: generate_from_dict(value) for key, value in obj.items()}
  File "/opt/conda/lib/python3.6/site-packages/datasets/features.py", line 899, in generate_from_dict
    return Sequence(feature=generate_from_dict(obj["feature"]), length=obj["length"])
KeyError: 'length'
The text was updated successfully, but these errors were encountered:
I am trying to run the notebook as-is and am getting the error mentioned below:
Traceback (most recent call last):
  File "train.py", line 45, in <module>
    train_dataset = load_from_disk(args.training_dir)
  File "/opt/conda/lib/python3.6/site-packages/datasets/load.py", line 797, in load_from_disk
    return Dataset.load_from_disk(dataset_path, fs, keep_in_memory=keep_in_memory)
  File "/opt/conda/lib/python3.6/site-packages/datasets/arrow_dataset.py", line 665, in load_from_disk
    dataset_info = DatasetInfo.from_dict(json.load(dataset_info_file))
  File "/opt/conda/lib/python3.6/site-packages/datasets/info.py", line 225, in from_dict
    return cls(**{k: v for k, v in dataset_info_dict.items() if k in field_names})
  File "<string>", line 18, in __init__
  File "/opt/conda/lib/python3.6/site-packages/datasets/info.py", line 137, in __post_init__
    self.features = Features.from_dict(self.features)
  File "/opt/conda/lib/python3.6/site-packages/datasets/features.py", line 947, in from_dict
    obj = generate_from_dict(dic)
  File "/opt/conda/lib/python3.6/site-packages/datasets/features.py", line 895, in generate_from_dict
    return {key: generate_from_dict(value) for key, value in obj.items()}
  File "/opt/conda/lib/python3.6/site-packages/datasets/features.py", line 895, in <dictcomp>
    return {key: generate_from_dict(value) for key, value in obj.items()}
  File "/opt/conda/lib/python3.6/site-packages/datasets/features.py", line 899, in generate_from_dict
    return Sequence(feature=generate_from_dict(obj["feature"]), length=obj["length"])
KeyError: 'length'
The text was updated successfully, but these errors were encountered: