Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Add support for hf datasets reader #490

Closed
wants to merge 23 commits into from
Closed
Show file tree
Hide file tree
Changes from 10 commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
56 changes: 56 additions & 0 deletions test/test_huggingface_datasets.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,56 @@
# Copyright (c) Meta Platforms, Inc. and affiliates.
# All rights reserved.
#
# This source code is licensed under the BSD-style license found in the
# LICENSE file in the root directory of this source tree.

import os
import unittest
import warnings

import expecttest

from _utils._common_utils_for_test import create_temp_dir, create_temp_files, reset_after_n_next_calls

from torchdata.datapipes.iter import IterableWrapper, HuggingFaceHubReader

# Detect whether the optional HuggingFace `datasets` dependency is installed;
# the tests below are skipped when it is not available.
try:
    import datasets  # noqa: F401
except ImportError:
    HAS_DATASETS = False
else:
    HAS_DATASETS = True

# Decorator: skip the decorated test when `datasets` is missing.
skipIfNoDatasets = unittest.skipIf(not HAS_DATASETS, "no datasets")

class TestHuggingFaceHubReader(expecttest.TestCase):
    """Tests for the ``HuggingFaceHubReader`` DataPipe.

    NOTE(review): the previous version carried setUp/tearDown fixtures that
    built two temp-dir trees and eight temp files which no test ever touched
    (copy-paste residue from the fsspec tests); they are removed here.
    """

    @skipIfNoDatasets
    def test_huggingface_hubreader(self) -> None:
        # Pin a small public demo dataset to a fixed revision so the test is
        # reproducible across runs.
        datapipe = HuggingFaceHubReader(dataset="lhoestq/demo1", revision="main")

        # TODO: add more useful tests (e.g. iterate the pipe and check rows)
        assert datapipe is not None

# Allow running this test module directly: `python test_huggingface_datasets.py`.
if __name__ == "__main__":
    unittest.main()
6 changes: 6 additions & 0 deletions torchdata/datapipes/iter/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -38,6 +38,11 @@
FSSpecFileOpenerIterDataPipe as FSSpecFileOpener,
FSSpecSaverIterDataPipe as FSSpecSaver,
)

from torchdata.datapipes.iter.load.huggingface import (
HuggingFaceHubReaderIterDataPipe as HuggingFaceHubReader
)

from torchdata.datapipes.iter.load.iopath import (
IoPathFileListerIterDataPipe as IoPathFileLister,
IoPathFileOpenerIterDataPipe as IoPathFileOpener,
Expand Down Expand Up @@ -140,6 +145,7 @@
"FSSpecFileLister",
"FSSpecFileOpener",
"FSSpecSaver",
"HuggingFaceHubReader",
"FileLister",
"FileOpener",
"Filter",
Expand Down
59 changes: 59 additions & 0 deletions torchdata/datapipes/iter/load/huggingface.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,59 @@
# Copyright (c) Meta Platforms, Inc. and affiliates.
# All rights reserved.
#
# This source code is licensed under the BSD-style license found in the
# LICENSE file in the root directory of this source tree.

import os
import posixpath

from typing import Any, Callable, Dict, Iterator, List, Optional, Tuple, Union

from torch.utils.data.datapipes.utils.common import match_masks

from torchdata.datapipes import functional_datapipe
from torchdata.datapipes.iter import IterDataPipe
from torchdata.datapipes.utils import StreamWrapper

# `datasets` is an optional dependency: keep this module importable without it.
# When it is missing, `datasets` is None and any use of
# `datasets.load_dataset(...)` below will fail with an AttributeError.
try:
    import datasets
except ImportError:
    datasets = None

def _get_response_from_huggingface_hub(
    dataset: str,
    split: str,
    revision: Optional[str],
    data_files: Optional[Dict[str, str]],
) -> Tuple[Any, StreamWrapper]:
    """Load ``dataset`` from the HuggingFace Hub.

    Args:
        dataset: dataset name/path on the HuggingFace Hub
        split: which split to load (e.g. ``"train"``)
        revision: dataset version (branch, tag, or commit hash), or None
        data_files: optional mapping defining custom split files, or None

    Returns:
        A tuple of the dataset's first element and the loaded dataset wrapped
        in a ``StreamWrapper``.

    Raises:
        ModuleNotFoundError: if the optional ``datasets`` package is missing.
    """
    if datasets is None:
        raise ModuleNotFoundError(
            "Package `datasets` is required to be installed to use this datapipe."
        )
    # BUG FIX: these must be passed as keywords. `load_dataset`'s positional
    # parameters are (path, name, data_dir, data_files, ...), so the previous
    # positional call bound `split` to the config `name` and `revision` to
    # `data_dir`, silently loading the wrong data.
    hf_dataset = datasets.load_dataset(
        dataset, split=split, revision=revision, data_files=data_files
    )
    return hf_dataset[0], StreamWrapper(hf_dataset)

@functional_datapipe("read_from_huggingface_hub")
class HuggingFaceHubReaderIterDataPipe(IterDataPipe[Tuple[str, StreamWrapper]]):
    r"""
    Takes in a HuggingFace dataset name and yields the dataset loaded from the
    HuggingFace Hub (functional name: ``read_from_huggingface_hub``).

    Args:
        dataset: a dataset name/path accepted by the HuggingFace `datasets` library
        split: which split to load (default: ``"train"``)
        revision: the specific dataset version (branch, tag, or commit hash)
        data_files: Optional dict to set custom train/test/validation split

    Raises:
        ModuleNotFoundError: if the optional ``datasets`` package is not installed.

    Example:
        >>> from torchdata.datapipes.iter import HuggingFaceHubReader
        >>> reader_dp = HuggingFaceHubReader(dataset="lhoestq/demo1", revision="main")
        >>> it = iter(reader_dp)
        >>> first_element, dataset_stream = next(it)
    """

    def __init__(
        self,
        dataset: str,
        *,
        split: str = "train",
        revision: Optional[str] = None,
        data_files: Optional[Dict[str, str]] = None,
    ) -> None:
        # Fail fast at construction time when the optional dependency is
        # missing, instead of at iteration time.
        if datasets is None:
            raise ModuleNotFoundError(
                "Package `datasets` is required to be installed to use this datapipe."
            )
        self.dataset = dataset
        self.split = split
        self.revision = revision
        self.data_files = data_files

    def __iter__(self) -> Iterator[Tuple[str, StreamWrapper]]:
        # Yields a single (first_element, StreamWrapper(dataset)) tuple; the
        # actual Hub download happens lazily here, not in __init__.
        yield _get_response_from_huggingface_hub(
            dataset=self.dataset,
            split=self.split,
            revision=self.revision,
            data_files=self.data_files,
        )

    def __len__(self) -> int:
        # The dataset's length is unknown until it has been downloaded, so
        # this DataPipe has no valid length. (The previous implementation
        # returned len(self.source_datapipe), but `source_datapipe` was only a
        # dangling class annotation that was never assigned, so it always
        # raised AttributeError.)
        raise TypeError(f"{type(self).__name__} instance doesn't have valid length")