Merge pull request #140 from stanford-oval/dependabot/pip/datasets-1.6.2
Bump datasets from 1.5.0 to 1.6.2
commit f1e722b049
@@ -72,22 +72,22 @@ class HFDataset(CQA):
         train_data, validation_data, test_data = None, None, None
         train_path, validation_path, test_path = None, None, None
         if train:
-            train_data = load_dataset(name, split='train', cache_dir=root)
+            train_data = load_dataset(name, split='train', cache_dir=root, keep_in_memory=False)
             train_path = train_data.cache_files[0]['filename']
         if validation:
-            validation_data = load_dataset(name, split=validation, cache_dir=root)
+            validation_data = load_dataset(name, split=validation, cache_dir=root, keep_in_memory=False)
             validation_path = validation_data.cache_files[0]['filename']
         if test:
-            test_data = load_dataset(name, split='test', cache_dir=root)
+            test_data = load_dataset(name, split='test', cache_dir=root, keep_in_memory=False)
             test_path = test_data.cache_files[0]['filename']

         if kwargs.pop('hf_test_overfit', False):
             # override validation/ test data with train data
             if validation:
-                validation_data = load_dataset(name, split='train', cache_dir=root)
+                validation_data = load_dataset(name, split='train', cache_dir=root, keep_in_memory=False)
                 validation_path = validation_data.cache_files[0]['filename']
             if test:
-                test_data = load_dataset(name, split='train', cache_dir=root)
+                test_data = load_dataset(name, split='train', cache_dir=root, keep_in_memory=False)
                 test_path = test_data.cache_files[0]['filename']

         train_data = None if train is None else cls(train_data, **kwargs)
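The keep_in_memory=False argument matters here because, starting with datasets 1.6, small datasets may be loaded fully into memory by default (controlled by a configurable size threshold), in which case cache_files can be empty and the cache_files[0]['filename'] lookups above would fail. A minimal sketch of the pattern the patched code relies on (not part of this PR; the dataset name and cache directory are placeholders):

    from datasets import load_dataset

    # keep_in_memory=False forces the split to be memory-mapped from its Arrow
    # cache file instead of being held purely in RAM, so cache_files is populated.
    data = load_dataset('imdb', split='train', cache_dir='./hf_cache', keep_in_memory=False)

    # Path of the on-disk Arrow file backing this split.
    print(data.cache_files[0]['filename'])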
setup.py (+1 -1)
@@ -54,7 +54,7 @@ setuptools.setup(
         'pyrouge>=0.1.3',
         'sacrebleu~=1.0',
         'requests~=2.22',
-        'datasets==1.5.0',
+        'datasets==1.6.2',
         'seqeval==1.2.2',
         'transformers==4.5.1',
         'sentencepiece==0.1.*',
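Because install_requires pins an exact version, a quick sanity check after reinstalling (illustrative only, not part of the PR) is to confirm the interpreter picked up the bumped release:

    import datasets

    # Should print 1.6.2 once the updated requirement is installed.
    print(datasets.__version__)
    assert datasets.__version__ == '1.6.2'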